Added all tokens to unit test

This commit is contained in:
Sasha Koshka 2022-08-10 11:43:21 -04:00
parent 3f16d34aa1
commit 488220f3a7
5 changed files with 41 additions and 11 deletions

View File

@@ -4,20 +4,52 @@ import "testing"
import "github.com/sashakoshka/arf/file" import "github.com/sashakoshka/arf/file"
func TestTokenizeAll (test *testing.T) { func TestTokenizeAll (test *testing.T) {
file, err := file.Open("tests/parser/all") file, err := file.Open("tests/lexer/all")
if err != nil { if err != nil {
test.Log(err) test.Log(err)
test.Fail() test.Fail()
} }
tokens, err := Tokenize(file) tokens, err := Tokenize(file)
if err != nil { if err == nil {
test.Log(err) test.Log("Tokenize() should have returned an error")
test.Fail() test.Fail()
} }
correct := []Token { correct := []Token {
Token { kind: TokenKindSeparator, }, Token { kind: TokenKindSeparator },
Token { kind: TokenKindPermission /* TODO: value */ },
Token { kind: TokenKindReturnDirection },
Token { kind: TokenKindInt, value: -349820394 },
Token { kind: TokenKindUInt, value: 932748397 },
Token { kind: TokenKindFloat, value: 239485.37520 },
Token { kind: TokenKindString, value: "hello world\n" },
Token { kind: TokenKindRune, value: 'E' },
Token { kind: TokenKindName, value: "helloWorld" },
Token { kind: TokenKindColon },
Token { kind: TokenKindDot },
Token { kind: TokenKindLBracket },
Token { kind: TokenKindRBracket },
Token { kind: TokenKindLBrace },
Token { kind: TokenKindRBrace },
Token { kind: TokenKindPlus },
Token { kind: TokenKindMinus },
Token { kind: TokenKindIncrement },
Token { kind: TokenKindDecrement },
Token { kind: TokenKindAsterisk },
Token { kind: TokenKindSlash },
Token { kind: TokenKindAt },
Token { kind: TokenKindExclamation },
Token { kind: TokenKindPercent },
Token { kind: TokenKindTilde },
Token { kind: TokenKindLessThan },
Token { kind: TokenKindLShift },
Token { kind: TokenKindGreaterThan },
Token { kind: TokenKindRShift },
Token { kind: TokenKindBinaryOr },
Token { kind: TokenKindLogicalOr },
Token { kind: TokenKindBinaryAnd },
Token { kind: TokenKindLogicalAnd },
} }
if len(tokens) != len(correct) { if len(tokens) != len(correct) {

View File

@@ -14,12 +14,12 @@ const (
TokenKindReturnDirection TokenKindReturnDirection
TokenKindInt TokenKindInt
TokenKindUInt
TokenKindFloat TokenKindFloat
TokenKindString TokenKindString
TokenKindRune TokenKindRune
TokenKindName TokenKindName
TokenKindSymbol
TokenKindColon TokenKindColon
TokenKindDot TokenKindDot
@@ -31,6 +31,8 @@ const (
TokenKindPlus TokenKindPlus
TokenKindMinus TokenKindMinus
TokenKindIncrement
TokenKindDecrement
TokenKindAsterisk TokenKindAsterisk
TokenKindSlash TokenKindSlash

2
tests/lexer/all Normal file
View File

@@ -0,0 +1,2 @@
--- rw -> -349820394 932748397 239485.37520 "hello world!\n" 'E' helloWorld:.[]{}
+ - ++ -- * / @ ! % ~ < << > >> | || & &&

View File

@@ -1,6 +0,0 @@
# normal tokens
--- rn rr nw -> 239 -120 + - / * -- ++ [{[skadjsk 2039.32]}] 0x5FC2D
"skdjlsakdj" '\n'
# erroneous tokens (poorly spaced)
---rnrrnw->239-120+-/*--++