Added unit test for lexer

This commit is contained in:
2022-08-10 11:28:29 -04:00
parent 245798f33d
commit 3f16d34aa1
6 changed files with 56 additions and 4 deletions

View File

@@ -79,6 +79,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
}
case '\n':
// line break
// TODO: if last line was blank (only whitespace), discard.
lexer.addToken (Token {
kind: TokenKindNewline,
})

33
lexer/lexer_test.go Normal file
View File

@@ -0,0 +1,33 @@
package lexer
import "testing"
import "github.com/sashakoshka/arf/file"
// TestTokenizeAll lexes the "all" test file and compares the resulting
// token stream against the expected sequence.
func TestTokenizeAll (test *testing.T) {
	// don't shadow the imported file package with the opened file
	f, err := file.Open("tests/parser/all")
	if err != nil {
		// Fatal stops the test here; continuing with an invalid
		// file would make Tokenize panic.
		test.Fatal(err)
	}

	tokens, err := Tokenize(f)
	if err != nil {
		test.Fatal(err)
	}

	correct := []Token {
		Token { kind: TokenKindSeparator },
	}

	// fail fast on a length mismatch so the comparison loop below
	// cannot index past the end of the correct slice
	if len(tokens) != len(correct) {
		test.Fatalf("lexed %d tokens, want %d", len(tokens), len(correct))
	}

	for index, token := range tokens {
		if !token.Equals(correct[index]) {
			test.Log("token", index, "not equal")
			test.Fail()
		}
	}
}

View File

@@ -73,6 +73,11 @@ func (token Token) Value () (value any) {
return token.value
}
// Equals reports whether this token and testToken are identical.
func (token Token) Equals (testToken Token) (match bool) {
	match = token == testToken
	return
}
// Location returns the location of the token in its file.
func (token Token) Location () (location file.Location) {
return token.location