Lexer tokens are now created by the lexer

This is so positional information can be accurately embedded into them.
Commit 856d5763d3 (parent accf528869), authored 2022-08-12 14:34:07 -05:00.
5 changed files with 89 additions and 70 deletions.

View File

@@ -8,6 +8,8 @@ func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error
var fragment float64
var isFloat bool
token := lexer.newToken()
if lexer.char == '0' {
lexer.nextRune()
@@ -23,7 +25,7 @@ func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error
number, fragment, isFloat, err = lexer.tokenizeNumber(8)
} else {
return file.NewError (
lexer.file.Location(),
lexer.file.Location(1),
"unexpected character in number literal",
file.ErrorKindError)
}
@@ -33,8 +35,6 @@ func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error
if err != nil { return }
token := Token { }
if isFloat {
floatNumber := float64(number) + fragment