Fixed lexing digraph tokens

The lexer gave wrong token locations and would skip an extra rune when a digraph
was not found.
This commit is contained in:
Sasha Koshka 2022-08-16 20:26:06 -04:00
parent 9e01eef45b
commit 3407aa7c59

View File

@ -175,9 +175,9 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
lexer.addToken(token)
err = lexer.nextRune()
case '.':
token := lexer.newToken()
err = lexer.nextRune()
if err != nil { return }
token := lexer.newToken()
token.kind = TokenKindDot
if lexer.char == '.' {
token.kind = TokenKindElipsis
@ -210,15 +210,15 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
lexer.addToken(token)
err = lexer.nextRune()
case '+':
token := lexer.newToken()
err = lexer.nextRune()
if err != nil { return }
token := lexer.newToken()
token.kind = TokenKindPlus
if lexer.char == '+' {
token.kind = TokenKindIncrement
err = lexer.nextRune()
}
lexer.addToken(token)
err = lexer.nextRune()
case '-':
err = lexer.tokenizeDashBeginning()
case '*':
@ -252,45 +252,45 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
lexer.addToken(token)
err = lexer.nextRune()
case '<':
token := lexer.newToken()
err = lexer.nextRune()
if err != nil { return }
token := lexer.newToken()
token.kind = TokenKindLessThan
if lexer.char == '<' {
token.kind = TokenKindLShift
err = lexer.nextRune()
}
lexer.addToken(token)
err = lexer.nextRune()
case '>':
token := lexer.newToken()
err = lexer.nextRune()
if err != nil { return }
token := lexer.newToken()
token.kind = TokenKindGreaterThan
if lexer.char == '>' {
token.kind = TokenKindRShift
err = lexer.nextRune()
}
lexer.addToken(token)
err = lexer.nextRune()
case '|':
token := lexer.newToken()
err = lexer.nextRune()
if err != nil { return }
token := lexer.newToken()
token.kind = TokenKindBinaryOr
if lexer.char == '|' {
token.kind = TokenKindLogicalOr
err = lexer.nextRune()
}
lexer.addToken(token)
err = lexer.nextRune()
case '&':
token := lexer.newToken()
err = lexer.nextRune()
if err != nil { return }
token := lexer.newToken()
token.kind = TokenKindBinaryAnd
if lexer.char == '&' {
token.kind = TokenKindLogicalAnd
err = lexer.nextRune()
}
lexer.addToken(token)
err = lexer.nextRune()
default:
err = file.NewError (
lexer.file.Location(1),