From 7a0ed247e21acfbaa8936993445ed7af2e89300b Mon Sep 17 00:00:00 2001
From: Sasha Koshka
Date: Thu, 11 Aug 2022 04:23:06 -0500
Subject: [PATCH] Added number lexing unit test

---
 lexer/lexer_test.go | 77 +++++++++++++++++++++++++++++++--------------
 tests/lexer/numbers | 13 ++++++++
 2 files changed, 67 insertions(+), 23 deletions(-)
 create mode 100644 tests/lexer/numbers

diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index 9143521..fa72945 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -4,8 +4,8 @@ import "testing"
 import "github.com/sashakoshka/arf/file"
 import "github.com/sashakoshka/arf/types"
 
-func TestTokenizeAll (test *testing.T) {
-	file, err := file.Open("../tests/lexer/all")
+func checkTokenSlice (filePath string, correct []Token, test *testing.T) {
+	file, err := file.Open(filePath)
 	if err != nil {
 		test.Log(err)
 		test.Fail()
@@ -26,7 +26,28 @@
 		return
 	}
 
-	correct := []Token {
+	if len(tokens) != len(correct) {
+		test.Log("lexed", len(tokens), "tokens, want", len(correct))
+		test.Fail()
+		return
+	}
+	test.Log("token slice length match", len(tokens), "=", len(correct))
+
+	for index, token := range tokens {
+		if !token.Equals(correct[index]) {
+			test.Log("token", index, "not equal")
+			test.Log (
+				"have", token.Describe(),
+				"want", correct[index].Describe())
+			test.Fail()
+			return
+		}
+	}
+	test.Log("token slice content match")
+}
+
+func TestTokenizeAll (test *testing.T) {
+	checkTokenSlice("../tests/lexer/all", []Token {
 		Token { kind: TokenKindSeparator },
 		Token { kind: TokenKindPermission, value: types.Permission {
 			Internal: types.ModeRead,
@@ -65,24 +86,34 @@
 		Token { kind: TokenKindBinaryAnd },
 		Token { kind: TokenKindLogicalAnd },
 		Token { kind: TokenKindNewline },
-	}
-
-	if len(tokens) != len(correct) {
-		test.Log("lexed", len(tokens), "tokens, want", len(correct))
-		test.Fail()
-		return
-	}
-	test.Log("token slice length match", len(tokens), "=",
-		len(correct))
-
-	for index, token := range tokens {
-		if !token.Equals(correct[index]) {
-			test.Log("token", index, "not equal")
-			test.Log (
-				"have", token.Describe(),
-				"want", correct[index].Describe())
-			test.Fail()
-			return
-		}
-	}
-	test.Log("token slice content match")
+	}, test)
+}
+
+func TestTokenizeNumbers (test *testing.T) {
+	checkTokenSlice("../tests/lexer/numbers", []Token {
+		Token { kind: TokenKindUInt, value: uint64(83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindUInt, value: uint64(83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindUInt, value: uint64(83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindUInt, value: uint64(83628266) },
+		Token { kind: TokenKindNewline },
+
+		Token { kind: TokenKindInt, value: int64(-83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindInt, value: int64(-83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindInt, value: int64(-83628266) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindInt, value: int64(-83628266) },
+		Token { kind: TokenKindNewline },
+
+		Token { kind: TokenKindFloat, value: float64(0.123478) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindFloat, value: float64(234.3095) },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindFloat, value: float64(-2.312) },
+		Token { kind: TokenKindNewline },
+	}, test)
+}
diff --git a/tests/lexer/numbers b/tests/lexer/numbers
new file mode 100644
index 0000000..5152602
--- /dev/null
+++ b/tests/lexer/numbers
@@ -0,0 +1,13 @@
+83628266
+0b100111111000001000011101010
+0x4Fc10Ea
+0477010352
+
+-83628266
+-0b100111111000001000011101010
+-0x4Fc10Ea
+-0477010352
+
+0.123478
+234.3095
+-2.312