Added number lexing unit test

Sasha Koshka 2022-08-11 04:23:06 -05:00
parent 6acc11d046
commit 7a0ed247e2
2 changed files with 67 additions and 23 deletions

@@ -4,8 +4,8 @@ import "testing"
 import "github.com/sashakoshka/arf/file"
 import "github.com/sashakoshka/arf/types"
 
-func TestTokenizeAll (test *testing.T) {
-	file, err := file.Open("../tests/lexer/all")
+func checkTokenSlice (filePath string, correct []Token, test *testing.T) {
+	file, err := file.Open(filePath)
 	if err != nil {
 		test.Log(err)
 		test.Fail()
@@ -26,7 +26,28 @@ func TestTokenizeAll (test *testing.T) {
 		return
 	}
 	
-	correct := []Token {
+	if len(tokens) != len(correct) {
+		test.Log("lexed", len(tokens), "tokens, want", len(correct))
+		test.Fail()
+		return
+	}
+	test.Log("token slice length match", len(tokens), "=", len(correct))
+	
+	for index, token := range tokens {
+		if !token.Equals(correct[index]) {
+			test.Log("token", index, "not equal")
+			test.Log (
+				"have", token.Describe(),
+				"want", correct[index].Describe())
+			test.Fail()
+			return
+		}
+	}
+	test.Log("token slice content match")
+}
+
+func TestTokenizeAll (test *testing.T) {
+	checkTokenSlice("../tests/lexer/all", []Token {
 		Token { kind: TokenKindSeparator },
 		Token { kind: TokenKindPermission, value: types.Permission {
 			Internal: types.ModeRead,
@@ -65,24 +86,34 @@ func TestTokenizeAll (test *testing.T) {
 		Token { kind: TokenKindBinaryAnd },
 		Token { kind: TokenKindLogicalAnd },
 		Token { kind: TokenKindNewline },
-	}
-	
-	if len(tokens) != len(correct) {
-		test.Log("lexed", len(tokens), "tokens, want", len(correct))
-		test.Fail()
-		return
-	}
-	test.Log("token slice length match", len(tokens), "=", len(correct))
-	
-	for index, token := range tokens {
-		if !token.Equals(correct[index]) {
-			test.Log("token", index, "not equal")
-			test.Log (
-				"have", token.Describe(),
-				"want", correct[index].Describe())
-			test.Fail()
-			return
-		}
-	}
-	test.Log("token slice content match")
+	}, test)
+}
+
+func TestTokenizeNumbers (test *testing.T) {
+	checkTokenSlice("../tests/lexer/numbers", []Token {
+		Token { kind: TokenKindUInt, value: uint64(83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindUInt, value: uint64(83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindUInt, value: uint64(83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindUInt, value: uint64(83628266) },
+		Token { kind: TokenKindNewline },
+
+		Token { kind: TokenKindInt, value: int64(-83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindInt, value: int64(-83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindInt, value: int64(-83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindInt, value: int64(-83628266) },
+		Token { kind: TokenKindNewline },
+
+		Token { kind: TokenKindFloat, value: float64(0.123478) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindFloat, value: float64(234.3095) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindFloat, value: float64(-2.312) },
+		Token { kind: TokenKindNewline },
+	}, test)
 }
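
With this refactor, both tests share the checkTokenSlice helper. Assuming the test file sits in a lexer package one level below the repository root (which the "../tests/lexer/..." paths suggest, though the package path here is an assumption), the suite can be run with:

	go test -v ./lexer

The -v flag surfaces the test.Log output ("token slice length match", "token slice content match") even when the tests pass.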

tests/lexer/numbers (new file, 13 lines)

@@ -0,0 +1,13 @@
+83628266
+0b100111111000001000011101010
+0x4Fc10Ea
+0477010352
+
+-83628266
+-0b100111111000001000011101010
+-0x4Fc10Ea
+-0477010352
+
+0.123478
+234.3095
+-2.312
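
For reference, the first four literals above are the same value spelled in decimal, binary, hexadecimal, and octal, which is why the expected token slice repeats uint64(83628266) four times. A minimal standalone sketch (not part of this commit, and not the lexer's actual implementation) that checks the equivalence with Go's strconv, assuming the lexer follows the usual 0b/0x/leading-zero base prefixes:

	package main

	import (
		"fmt"
		"strconv"
	)

	func main() {
		// The same value spelled in decimal, binary, hex, and octal,
		// exactly as the literals appear in tests/lexer/numbers.
		literals := []string{
			"83628266",
			"0b100111111000001000011101010",
			"0x4Fc10Ea",
			"0477010352",
		}
		for _, literal := range literals {
			// Base 0 tells ParseUint to infer the base from the
			// prefix: 0b is binary, 0x is hex, a leading 0 is octal.
			value, err := strconv.ParseUint(literal, 0, 64)
			if err != nil {
				panic(err)
			}
			fmt.Printf("%-30s = %d\n", literal, value)
		}
		// All four lines print "... = 83628266".
	}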