diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index fa72945..2dca3f8 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -117,3 +117,21 @@ func TestTokenizeNumbers (test *testing.T) {
 		Token { kind: TokenKindNewline },
 	}, test)
 }
+
+func TestTokenizeText (test *testing.T) {
+	checkTokenSlice("../tests/lexer/text", []Token {
+		Token { kind: TokenKindString, value: "hello world!\a\b\f\n\r\t\v'\"\\" },
+		Token { kind: TokenKindNewline },
+		Token { kind: TokenKindRune, value: '\a' },
+		Token { kind: TokenKindRune, value: '\b' },
+		Token { kind: TokenKindRune, value: '\f' },
+		Token { kind: TokenKindRune, value: '\n' },
+		Token { kind: TokenKindRune, value: '\r' },
+		Token { kind: TokenKindRune, value: '\t' },
+		Token { kind: TokenKindRune, value: '\v' },
+		Token { kind: TokenKindRune, value: '\'' },
+		Token { kind: TokenKindRune, value: '"' },
+		Token { kind: TokenKindRune, value: '\\' },
+		Token { kind: TokenKindNewline },
+	}, test)
+}
diff --git a/lexer/numbers.go b/lexer/numbers.go
index 80d4a91..5518f58 100644
--- a/lexer/numbers.go
+++ b/lexer/numbers.go
@@ -100,7 +100,9 @@ func (lexer *LexingOperation) tokenizeNumber (
 		err = lexer.nextRune()
 		if err != nil { return }
 	}
-	
+
+	// TODO: increase accuracy of this so that TestTokenizeNumbers is
+	// passed.
 	if lexer.char == '.' {
 		isFloat = true
 		err = lexer.nextRune()
diff --git a/tests/lexer/text b/tests/lexer/text
new file mode 100644
index 0000000..3515bfc
--- /dev/null
+++ b/tests/lexer/text
@@ -0,0 +1,2 @@
+"hello world!\a\b\f\n\r\t\v\'\"\\"
+'\a' '\b' '\f' '\n' '\r' '\t' '\v' '\'' '\"' '\\'
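
The TODO added to tokenizeNumber concerns how accurately the fractional part of a float literal is decoded. One common way to tighten this, sketched below with hypothetical helper names rather than the project's actual lexer API, is to accumulate the fractional digits into a single integer and divide once by the matching power of ten, instead of scaling each digit by a progressively smaller factor:

package main

import (
	"fmt"
	"math"
)

// scaledFraction builds the fractional part the way a naive lexer might:
// each digit is multiplied by a shrinking power-of-ten factor and summed.
// Because 0.1 has no exact float64 representation, error compounds per digit.
// (Standalone illustration only; not the tokenizeNumber implementation.)
func scaledFraction(digits []int) (result float64) {
	scale := 0.1
	for _, digit := range digits {
		result += float64(digit) * scale
		scale *= 0.1
	}
	return
}

// accumulatedFraction gathers the digits into one integer and divides once
// by the matching power of ten, so rounding can only occur at the final step.
func accumulatedFraction(digits []int) float64 {
	var value uint64
	for _, digit := range digits {
		value = value*10 + uint64(digit)
	}
	return float64(value) / math.Pow(10, float64(len(digits)))
}

func main() {
	digits := []int{3} // fractional digits of "0.3"
	fmt.Println(scaledFraction(digits))      // 0.30000000000000004
	fmt.Println(accumulatedFraction(digits)) // 0.3
}

Every intermediate value in the accumulate-then-divide form stays an exact integer (within uint64 range), which is why the single final division lands on the closest representable float.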