arf/lexer/lexer_test.go

package lexer
import "testing"
import "github.com/sashakoshka/arf/file"
import "github.com/sashakoshka/arf/types"
func TestTokenizeAll (test *testing.T) {
	file, err := file.Open("../tests/lexer/all")
	if err != nil {
		test.Log(err)
		test.Fail()
		return
	}

	tokens, err := Tokenize(file)

	// print all tokens
	for _, token := range tokens {
		test.Log("got token:", token.Describe())
	}

	// the lexer is expected to reject this file, so a nil error is a test
	// failure. check for nil before logging the error to avoid a nil
	// pointer dereference.
	if err == nil {
		test.Log("Tokenize() should have returned an error")
		test.Fail()
		return
	}
	test.Log("resulting error:")
	test.Log(err.Error())

	correct := []Token {
		Token { kind: TokenKindSeparator },
		Token { kind: TokenKindPermission, value: types.Permission {
			Internal: types.ModeRead,
			External: types.ModeWrite,
		}},
		Token { kind: TokenKindReturnDirection },
		Token { kind: TokenKindInt, value: -349820394 },
		Token { kind: TokenKindUInt, value: 932748397 },
		Token { kind: TokenKindFloat, value: 239485.37520 },
		Token { kind: TokenKindString, value: "hello world\n" },
		Token { kind: TokenKindRune, value: 'E' },
		Token { kind: TokenKindName, value: "helloWorld" },
		Token { kind: TokenKindColon },
		Token { kind: TokenKindDot },
		Token { kind: TokenKindLBracket },
		Token { kind: TokenKindRBracket },
		Token { kind: TokenKindLBrace },
		Token { kind: TokenKindRBrace },
		Token { kind: TokenKindPlus },
		Token { kind: TokenKindMinus },
		Token { kind: TokenKindIncrement },
		Token { kind: TokenKindDecrement },
		Token { kind: TokenKindAsterisk },
		Token { kind: TokenKindSlash },
		Token { kind: TokenKindAt },
		Token { kind: TokenKindExclamation },
		Token { kind: TokenKindPercent },
		Token { kind: TokenKindTilde },
		Token { kind: TokenKindLessThan },
		Token { kind: TokenKindLShift },
		Token { kind: TokenKindGreaterThan },
		Token { kind: TokenKindRShift },
		Token { kind: TokenKindBinaryOr },
		Token { kind: TokenKindLogicalOr },
		Token { kind: TokenKindBinaryAnd },
		Token { kind: TokenKindLogicalAnd },
	}

	if len(tokens) != len(correct) {
		test.Log("lexed", len(tokens), "tokens, want", len(correct))
		test.Fail()
		return
	}
	test.Log("token slice length match", len(tokens), "=", len(correct))

	for index, token := range tokens {
		if !token.Equals(correct[index]) {
			test.Log("token", index, "not equal")
			test.Fail()
			return
		}
	}
	test.Log("token slice content match")
}