Fixed bug in number lexing function that prevented reading zero
This commit is contained in:
parent d27c0ff07c
commit 608162fa92
@ -93,7 +93,7 @@ func TestTokenizeNumbers (test *testing.T) {
	checkTokenSlice("../tests/lexer/numbers.arf", []Token {
		Token { kind: TokenKindUInt, value: uint64(0) },
		Token { kind: TokenKindNewline },
		Token { kind: TokenKindUInt, value: uint64(5) },
		Token { kind: TokenKindUInt, value: uint64(8) },
		Token { kind: TokenKindNewline },
		Token { kind: TokenKindUInt, value: uint64(83628266) },
		Token { kind: TokenKindNewline },
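The checkTokenSlice helper itself is not part of this diff, so its exact signature is not visible here. As a rough sketch of what such a comparison could look like, assuming a hypothetical Tokenize(path string) ([]Token, error) entry point and the kind/value fields shown above:

package lexer

import "testing"

// checkTokenSliceSketch is a hypothetical stand-in for checkTokenSlice, whose
// real implementation is not shown in this diff. Tokenize is an assumed entry
// point, not necessarily the repository's API.
func checkTokenSliceSketch (test *testing.T, path string, want []Token) {
	got, err := Tokenize(path)
	if err != nil { test.Fatal(err) }

	if len(got) != len(want) {
		test.Fatalf("wrong token count: got %d, want %d", len(got), len(want))
	}
	for index, token := range want {
		if got[index].kind != token.kind || got[index].value != token.value {
			test.Fatalf (
				"token %d mismatch: got %#v, want %#v",
				index, got[index], token)
		}
	}
}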
@ -1,7 +1,5 @@
package lexer

import "git.tebibyte.media/sashakoshka/arf/file"

// tokenizeNumberBeginning lexes a token that starts with a number.
func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error) {
	var number uint64
@ -23,11 +21,6 @@ func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error
			number, fragment, isFloat, err = lexer.tokenizeNumber(10)
		} else if lexer.char >= '0' && lexer.char <= '9' {
			number, fragment, isFloat, err = lexer.tokenizeNumber(8)
		} else {
			return file.NewError (
				lexer.file.Location(1),
				"unexpected character in number literal",
				file.ErrorKindError)
		}
	} else {
		number, fragment, isFloat, err = lexer.tokenizeNumber(10)
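This hunk shows the tail of the base dispatch in tokenizeNumberBeginning: a leading zero followed by more digits selects base 8, an unrecognized character after the zero is an error, and literals without a leading zero are read as base 10; the fixture below also exercises 0b and 0x prefixes. The following is a minimal, self-contained sketch of that kind of dispatch, not the repository's tokenizeNumber (which additionally produces the fragment and isFloat values seen above):

package main

import "fmt"

// parseIntegerLiteral sketches prefix-based base selection: 0b is binary,
// 0x is hexadecimal, a zero followed directly by digits is octal, and
// everything else is decimal.
func parseIntegerLiteral (literal string) (number uint64, err error) {
	base  := uint64(10)
	runes := []rune(literal)

	if len(runes) > 1 && runes[0] == '0' {
		switch {
		case runes[1] == 'b':
			base  = 2
			runes = runes[2:]
		case runes[1] == 'x':
			base  = 16
			runes = runes[2:]
		case runes[1] >= '0' && runes[1] <= '9':
			base  = 8
			runes = runes[1:]
		}
	}

	// Accumulate digits, rejecting anything outside the selected base.
	for _, char := range runes {
		var digit uint64
		switch {
		case char >= '0' && char <= '9': digit = uint64(char - '0')
		case char >= 'a' && char <= 'f': digit = uint64(char - 'a') + 10
		case char >= 'A' && char <= 'F': digit = uint64(char - 'A') + 10
		default:
			return 0, fmt.Errorf (
				"unexpected character in number literal: %c", char)
		}
		if digit >= base {
			return 0, fmt.Errorf (
				"digit %c is out of range for base %d", char, base)
		}
		number = number * base + digit
	}
	return number, nil
}

func main () {
	for _, literal := range []string {
		"0", "83628266",
		"0b100111111000001000011101010", "0x4Fc10Ea",
	} {
		number, err := parseIntegerLiteral(literal)
		fmt.Println(literal, "->", number, err)
	}
}

Note that a bare "0" falls through to the base 10 path and yields zero, which is the case the commit's test additions cover.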
@ -1,5 +1,6 @@
:arf
0
8
83628266
0b100111111000001000011101010
0x4Fc10Ea
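The last three literals in this fixture are the same value, 83628266, written in decimal, binary, and hexadecimal, which matches the uint64(83628266) token expected by the test. A quick standalone check with the Go standard library (not part of the commit) confirms they decode to the same number:

package main

import (
	"fmt"
	"strconv"
)

func main () {
	// Base 0 lets ParseUint pick the base from the 0b/0x prefix.
	literals := []string {
		"83628266",
		"0b100111111000001000011101010",
		"0x4Fc10Ea",
	}
	for _, literal := range literals {
		number, err := strconv.ParseUint(literal, 0, 64)
		fmt.Println(literal, "->", number, err)
	}
}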