diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index 4a6fc96..bd44956 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -93,7 +93,7 @@ func TestTokenizeNumbers (test *testing.T) {
 	checkTokenSlice("../tests/lexer/numbers.arf", []Token {
 		Token { kind: TokenKindUInt, value: uint64(0) },
 		Token { kind: TokenKindNewline },
-		Token { kind: TokenKindUInt, value: uint64(5) },
+		Token { kind: TokenKindUInt, value: uint64(8) },
 		Token { kind: TokenKindNewline },
 		Token { kind: TokenKindUInt, value: uint64(83628266) },
 		Token { kind: TokenKindNewline },
diff --git a/lexer/numbers.go b/lexer/numbers.go
index 4dce92b..7093750 100644
--- a/lexer/numbers.go
+++ b/lexer/numbers.go
@@ -1,7 +1,5 @@
 package lexer
 
-import "git.tebibyte.media/sashakoshka/arf/file"
-
 // tokenizeSymbolBeginning lexes a token that starts with a number.
 func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error) {
 	var number uint64
@@ -23,11 +21,6 @@ func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error
 			number, fragment, isFloat, err = lexer.tokenizeNumber(10)
 		} else if lexer.char >= '0' && lexer.char <= '9' {
 			number, fragment, isFloat, err = lexer.tokenizeNumber(8)
-		} else {
-			return file.NewError (
-				lexer.file.Location(1),
-				"unexpected character in number literal",
-				file.ErrorKindError)
 		}
 	} else {
 		number, fragment, isFloat, err = lexer.tokenizeNumber(10)
diff --git a/tests/lexer/numbers.arf b/tests/lexer/numbers.arf
index e0da9bb..2cb6de4 100644
--- a/tests/lexer/numbers.arf
+++ b/tests/lexer/numbers.arf
@@ -1,5 +1,6 @@
 :arf
 0
+8
 83628266
 0b100111111000001000011101010
 0x4Fc10Ea
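
For context, the base selection that tokenizeNumberBeginning performs after the removed error branch can be summarised by the following self-contained Go sketch. It is an illustrative analogue rather than the arf lexer itself: literalBase is a hypothetical helper, the mapping of 0x and 0b prefixes to bases 16 and 2 is inferred from the 0x4Fc10Ea and 0b100111111000001000011101010 literals in tests/lexer/numbers.arf, and the fall-through for any other character after a leading zero is an assumption, since the patched handling of that case is not shown in this diff.

package main

import "fmt"

// literalBase mirrors the prefix dispatch sketched above: a leading '0'
// followed by 'x', 'b', '.', or another digit selects base 16, 2, 10, or 8.
// Anything else after a leading '0' previously hit the now-removed
// "unexpected character in number literal" error.
func literalBase(literal string) int {
	if len(literal) < 2 || literal[0] != '0' {
		return 10 // no leading zero: plain decimal, e.g. the new "8" test line
	}
	switch next := literal[1]; {
	case next == 'x':
		return 16 // assumed hexadecimal prefix (0x4Fc10Ea in the test input)
	case next == 'b':
		return 2 // assumed binary prefix (0b... in the test input)
	case next == '.':
		return 10 // assumed from the base-10 branch visible at the top of the hunk
	case next >= '0' && next <= '9':
		return 8 // leading zero plus digit is octal, per the tokenizeNumber(8) branch
	default:
		return 10 // assumption only; the patched lexer's handling is outside this diff
	}
}

func main() {
	for _, literal := range []string{"8", "0755", "0x4Fc10Ea", "0b1011"} {
		fmt.Printf("%-10s -> base %d\n", literal, literalBase(literal))
	}
}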