Fixed lexing digraph tokens

The lexer gave wrong token locations and would skip an extra rune when a
digraph was not found.
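
In short: each digraph case now creates its token before consuming the first
rune, so the token records that rune's location, and the second nextRune call
happens only inside the branch where the digraph is actually matched. Below is
a minimal, self-contained sketch of that control flow; the lexer and token
types here are simplified stand-ins for illustration, not the project's real
definitions.

	package main

	import "fmt"

	// Simplified stand-ins for the real lexer types; the actual project
	// tracks file locations and returns errors from nextRune.
	type tokenKind int

	const (
		kindPlus tokenKind = iota
		kindIncrement
	)

	type token struct {
		kind   tokenKind
		offset int // should point at the token's first rune
	}

	type lexer struct {
		input  []rune
		offset int
		tokens []token
	}

	func (l *lexer) char() rune        { return l.input[l.offset] }
	func (l *lexer) nextRune()         { l.offset++ }
	func (l *lexer) newToken() token   { return token{offset: l.offset} }
	func (l *lexer) addToken(t token)  { l.tokens = append(l.tokens, t) }

	// tokenizePlus mirrors the fixed control flow: the token is created
	// *before* advancing (so it records the '+' location), and the extra
	// nextRune happens only when the digraph "++" is actually present.
	func (l *lexer) tokenizePlus() {
		tok := l.newToken() // location of the first '+'
		l.nextRune()        // consume the first '+'
		tok.kind = kindPlus
		if l.offset < len(l.input) && l.char() == '+' {
			tok.kind = kindIncrement
			l.nextRune() // consume the second '+' only for the digraph
		}
		l.addToken(tok)
		// No unconditional nextRune here: when the input was just "+",
		// the following rune is left for the next tokenizer call.
	}

	func main() {
		l := &lexer{input: []rune("+a")}
		l.tokenizePlus()
		fmt.Println(l.tokens[0].offset, string(l.char())) // 0 a
	}

Run on the input "+a", this ordering emits a plus token at offset 0 and leaves
'a' for the next tokenizer call; the old ordering's unconditional nextRune
after addToken would have skipped it.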
Sasha Koshka 2022-08-16 20:26:06 -04:00
parent 9e01eef45b
commit 3407aa7c59
1 changed file with 11 additions and 11 deletions

@@ -175,9 +175,9 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		lexer.addToken(token)
 		err = lexer.nextRune()
 	case '.':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindDot
 		if lexer.char == '.' {
 			token.kind = TokenKindElipsis
@@ -210,15 +210,15 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		lexer.addToken(token)
 		err = lexer.nextRune()
 	case '+':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindPlus
 		if lexer.char == '+' {
 			token.kind = TokenKindIncrement
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	case '-':
 		err = lexer.tokenizeDashBeginning()
 	case '*':
@@ -252,45 +252,45 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		lexer.addToken(token)
 		err = lexer.nextRune()
 	case '<':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindLessThan
 		if lexer.char == '<' {
 			token.kind = TokenKindLShift
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	case '>':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindGreaterThan
 		if lexer.char == '>' {
 			token.kind = TokenKindRShift
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	case '|':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindBinaryOr
 		if lexer.char == '|' {
			token.kind = TokenKindLogicalOr
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	case '&':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindBinaryAnd
 		if lexer.char == '&' {
 			token.kind = TokenKindLogicalAnd
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	default:
 		err = file.NewError (
 			lexer.file.Location(1),