Collapsed all number tokenization functions into just one
parent 9e4684dbed
commit 40ad569870
@@ -11,14 +11,14 @@ func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error
 		if lexer.char == 'x' {
 			lexer.nextRune()
-			number, err = lexer.tokenizeHexidecimalNumber()
+			number, err = lexer.tokenizeNumber(16)
 		} else if lexer.char == 'b' {
 			lexer.nextRune()
-			number, err = lexer.tokenizeBinaryNumber()
+			number, err = lexer.tokenizeNumber(2)
 		} else if lexer.char == '.' {
-			number, err = lexer.tokenizeDecimalNumber()
+			number, err = lexer.tokenizeNumber(10)
 		} else if lexer.char >= '0' && lexer.char <= '9' {
-			number, err = lexer.tokenizeOctalNumber()
+			number, err = lexer.tokenizeNumber(8)
 		} else {
 			return file.NewError (
 				lexer.file.Location(), 1,
@@ -26,7 +26,7 @@ func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error
 				file.ErrorKindError)
 		}
 	} else {
-		number, err = lexer.tokenizeDecimalNumber()
+		number, err = lexer.tokenizeNumber(10)
 	}
 
 	if err != nil { return }
@@ -65,13 +65,13 @@ func runeToDigit (char rune, radix uint64) (digit uint64, worked bool) {
 	return
 }
 
-// tokenizeHexidecimalNumber Reads and tokenizes a hexidecimal number.
-func (lexer *LexingOperation) tokenizeHexidecimalNumber () (number uint64, err error) {
+// tokenizeNumber reads and tokenizes a number with the specified radix.
+func (lexer *LexingOperation) tokenizeNumber (radix uint64) (number uint64, err error) {
 	for {
-		digit, worked := runeToDigit(lexer.char, 16)
+		digit, worked := runeToDigit(lexer.char, radix)
 		if !worked { break }
 
-		number *= 16
+		number *= radix
 		number += digit
 
 		err = lexer.nextRune()
@@ -79,49 +79,3 @@ func (lexer *LexingOperation) tokenizeHexidecimalNumber () (number uint64, err e
 	}
 	return
 }
-
-// tokenizeBinaryNumber Reads and tokenizes a binary number.
-func (lexer *LexingOperation) tokenizeBinaryNumber () (number uint64, err error) {
-	for {
-		digit, worked := runeToDigit(lexer.char, 2)
-		if !worked { break }
-
-		number *= 2
-		number += digit
-
-		err = lexer.nextRune()
-		if err != nil { return }
-	}
-	return
-}
-
-// tokenizeDecimalNumber Reads and tokenizes a decimal number.
-func (lexer *LexingOperation) tokenizeDecimalNumber () (number uint64, err error) {
-	for {
-		digit, worked := runeToDigit(lexer.char, 10)
-		if !worked { break }
-
-		number *= 10
-		number += digit
-
-		err = lexer.nextRune()
-		if err != nil { return }
-	}
-
-	return
-}
-
-// tokenizeOctalNumber Reads and tokenizes an octal number.
-func (lexer *LexingOperation) tokenizeOctalNumber () (number uint64, err error) {
-	for {
-		digit, worked := runeToDigit(lexer.char, 8)
-		if !worked { break }
-
-		number *= 8
-		number += digit
-
-		err = lexer.nextRune()
-		if err != nil { return }
-	}
-	return
-}
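
For reference, the consolidated loop boils down to multiplying the running value by the radix and adding each digit, which is why one function can replace the former hex, binary, decimal, and octal variants. Below is a self-contained Go sketch of that same idea operating on a plain string rather than the lexer's rune stream; parseNumber and this particular runeToDigit body are illustrative stand-ins, not code from this commit.

package main

import "fmt"

// runeToDigit converts a rune to its digit value and reports whether the
// rune is a valid digit under the given radix (illustrative version; the
// real helper lives in the lexer and may differ in detail).
func runeToDigit(char rune, radix uint64) (digit uint64, worked bool) {
	switch {
	case char >= '0' && char <= '9':
		digit = uint64(char - '0')
	case char >= 'a' && char <= 'f':
		digit = uint64(char-'a') + 10
	case char >= 'A' && char <= 'F':
		digit = uint64(char-'A') + 10
	default:
		return 0, false
	}
	worked = digit < radix
	return
}

// parseNumber mirrors tokenizeNumber's accumulation loop: it consumes valid
// digits and stops at the first rune that does not fit the radix.
func parseNumber(input string, radix uint64) (number uint64) {
	for _, char := range input {
		digit, worked := runeToDigit(char, radix)
		if !worked { break }

		number *= radix
		number += digit
	}
	return
}

func main() {
	fmt.Println(parseNumber("ff", 16))  // 255
	fmt.Println(parseNumber("1010", 2)) // 10
	fmt.Println(parseNumber("777", 8))  // 511
	fmt.Println(parseNumber("42", 10))  // 42
}

Passing the radix in as a parameter is what lets the four former functions share one body; only the call sites in tokenizeNumberBeginning now choose the base (16, 2, 10, or 8).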