data-section #3

Merged
sashakoshka merged 42 commits from data-section into main 2022-08-17 18:21:20 +00:00
3 changed files with 2 additions and 8 deletions
Showing only changes of commit 608162fa92 - Show all commits

View File

@@ -93,7 +93,7 @@ func TestTokenizeNumbers (test *testing.T) {
checkTokenSlice("../tests/lexer/numbers.arf", []Token {
Token { kind: TokenKindUInt, value: uint64(0) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindUInt, value: uint64(5) },
Token { kind: TokenKindUInt, value: uint64(8) },
Token { kind: TokenKindNewline },
Token { kind: TokenKindUInt, value: uint64(83628266) },
Token { kind: TokenKindNewline },

View File

@@ -1,7 +1,5 @@
package lexer
import "git.tebibyte.media/sashakoshka/arf/file"
// tokenizeNumberBeginning lexes a token that starts with a number.
func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error) {
var number uint64
@@ -23,11 +21,6 @@ func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error
number, fragment, isFloat, err = lexer.tokenizeNumber(10)
} else if lexer.char >= '0' && lexer.char <= '9' {
number, fragment, isFloat, err = lexer.tokenizeNumber(8)
} else {
return file.NewError (
lexer.file.Location(1),
"unexpected character in number literal",
file.ErrorKindError)
}
} else {
number, fragment, isFloat, err = lexer.tokenizeNumber(10)

View File

@@ -1,5 +1,6 @@
:arf
0
8
83628266
0b100111111000001000011101010
0x4Fc10Ea