package lexer

import "testing"
import "github.com/sashakoshka/arf/file"
import "github.com/sashakoshka/arf/types"
func checkTokenSlice (filePath string, correct []Token, test *testing.T) {
file, err := file.Open(filePath)
2022-08-10 09:28:29 -06:00
if err != nil {
test.Log(err)
test.Fail()
2022-08-10 10:58:47 -06:00
return
2022-08-10 09:28:29 -06:00
}
tokens, err := Tokenize(file)
2022-08-10 13:02:08 -06:00
// print all tokens
2022-08-11 02:58:45 -06:00
for index, token := range tokens {
test.Log(index, "\tgot token:", token.Describe())
2022-08-10 13:02:08 -06:00
}
if err != nil {
test.Log("returned error:")
test.Log(err.Error())
2022-08-10 09:28:29 -06:00
test.Fail()
2022-08-10 10:58:47 -06:00
return
2022-08-10 09:28:29 -06:00
}
2022-08-11 03:23:06 -06:00
if len(tokens) != len(correct) {
test.Log("lexed", len(tokens), "tokens, want", len(correct))
test.Fail()
return
}
test.Log("token slice length match", len(tokens), "=", len(correct))
for index, token := range tokens {
if !token.Equals(correct[index]) {
test.Log("token", index, "not equal")
test.Log (
"have", token.Describe(),
"want", correct[index].Describe())
test.Fail()
return
}
}
test.Log("token slice content match")
}
func TestTokenizeAll (test *testing.T) {
checkTokenSlice("../tests/lexer/all", []Token {
2022-08-10 09:43:21 -06:00
Token { kind: TokenKindSeparator },
2022-08-10 10:52:20 -06:00
Token { kind: TokenKindPermission, value: types.Permission {
Internal: types.ModeRead,
External: types.ModeWrite,
}},
2022-08-10 09:43:21 -06:00
Token { kind: TokenKindReturnDirection },
2022-08-11 02:58:45 -06:00
Token { kind: TokenKindInt, value: int64(-349820394) },
Token { kind: TokenKindUInt, value: uint64(932748397) },
2022-08-10 09:43:21 -06:00
Token { kind: TokenKindFloat, value: 239485.37520 },
Token { kind: TokenKindString, value: "hello world!\n" },
2022-08-10 09:43:21 -06:00
Token { kind: TokenKindRune, value: 'E' },
Token { kind: TokenKindName, value: "helloWorld" },
Token { kind: TokenKindColon },
Token { kind: TokenKindDot },
Token { kind: TokenKindLBracket },
Token { kind: TokenKindRBracket },
Token { kind: TokenKindLBrace },
Token { kind: TokenKindRBrace },
2022-08-11 02:47:42 -06:00
Token { kind: TokenKindNewline },
2022-08-10 09:43:21 -06:00
Token { kind: TokenKindPlus },
Token { kind: TokenKindMinus },
Token { kind: TokenKindIncrement },
Token { kind: TokenKindDecrement },
Token { kind: TokenKindAsterisk },
Token { kind: TokenKindSlash },
Token { kind: TokenKindAt },
Token { kind: TokenKindExclamation },
Token { kind: TokenKindPercent },
Token { kind: TokenKindTilde },
Token { kind: TokenKindLessThan },
Token { kind: TokenKindLShift },
Token { kind: TokenKindGreaterThan },
Token { kind: TokenKindRShift },
Token { kind: TokenKindBinaryOr },
Token { kind: TokenKindLogicalOr },
Token { kind: TokenKindBinaryAnd },
Token { kind: TokenKindLogicalAnd },
2022-08-11 02:47:42 -06:00
Token { kind: TokenKindNewline },
2022-08-11 03:23:06 -06:00
}, test)
}
2022-08-10 09:28:29 -06:00
2022-08-11 03:23:06 -06:00
func TestTokenizeNumbers (test *testing.T) {
checkTokenSlice("../tests/lexer/numbers", []Token {
Token { kind: TokenKindUInt, value: uint64(83628266) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindUInt, value: uint64(83628266) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindUInt, value: uint64(83628266) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindUInt, value: uint64(83628266) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindInt, value: int64(-83628266) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindInt, value: int64(-83628266) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindInt, value: int64(-83628266) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindInt, value: int64(-83628266) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindFloat, value: float64(0.123478) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindFloat, value: float64(234.3095) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindFloat, value: float64(-2.312) },
Token { kind: TokenKindNewline },
}, test)
2022-08-10 09:28:29 -06:00
}
// TestTokenizeText checks that the lexer handles string and rune literals
// from the "text" test fixture, including escape sequences and numeric
// character escapes.
func TestTokenizeText (test *testing.T) {
	checkTokenSlice("../tests/lexer/text", []Token {
		// a string containing every single-character escape sequence
		Token { kind: TokenKindString, value: "hello world!\a\b\f\n\r\t\v'\"\\" },
		Token { kind: TokenKindNewline },
		// each escape sequence as an individual rune literal
		Token { kind: TokenKindRune, value: '\a' },
		Token { kind: TokenKindRune, value: '\b' },
		Token { kind: TokenKindRune, value: '\f' },
		Token { kind: TokenKindRune, value: '\n' },
		Token { kind: TokenKindRune, value: '\r' },
		Token { kind: TokenKindRune, value: '\t' },
		Token { kind: TokenKindRune, value: '\v' },
		Token { kind: TokenKindRune, value: '\'' },
		Token { kind: TokenKindRune, value: '"' },
		Token { kind: TokenKindRune, value: '\\' },
		Token { kind: TokenKindNewline },
		// numeric escapes — all three forms resolve to '@' (U+0040)
		Token { kind: TokenKindString, value: "hello world \x40\u0040\U00000040!" },
		Token { kind: TokenKindNewline },
	}, test)
}
2022-08-11 11:47:50 -06:00
func TestTokenizeIndent (test *testing.T) {
checkTokenSlice("../tests/lexer/indent", []Token {
Token { kind: TokenKindName, value: "line1" },
Token { kind: TokenKindNewline },
Token { kind: TokenKindIndent, value: 1 },
Token { kind: TokenKindName, value: "line2" },
Token { kind: TokenKindNewline },
Token { kind: TokenKindIndent, value: 4 },
Token { kind: TokenKindName, value: "line3" },
Token { kind: TokenKindNewline },
Token { kind: TokenKindName, value: "line4" },
Token { kind: TokenKindNewline },
Token { kind: TokenKindIndent, value: 2 },
Token { kind: TokenKindName, value: "line5" },
Token { kind: TokenKindNewline },
}, test)
}