From 3407aa7c5941b93147773e85e291d08983b835bd Mon Sep 17 00:00:00 2001
From: Sasha Koshka
Date: Tue, 16 Aug 2022 20:26:06 -0400
Subject: [PATCH] Fixed lexing digraph tokens

Lexer gave wrong token locations and would skip an extra rune when
digraph was not found.
---
 lexer/lexer.go | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/lexer/lexer.go b/lexer/lexer.go
index 4cc7660..1c2b761 100644
--- a/lexer/lexer.go
+++ b/lexer/lexer.go
@@ -175,9 +175,9 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		lexer.addToken(token)
 		err = lexer.nextRune()
 	case '.':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindDot
 		if lexer.char == '.' {
 			token.kind = TokenKindElipsis
@@ -210,15 +210,15 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		lexer.addToken(token)
 		err = lexer.nextRune()
 	case '+':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindPlus
 		if lexer.char == '+' {
 			token.kind = TokenKindIncrement
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	case '-':
 		err = lexer.tokenizeDashBeginning()
 	case '*':
@@ -252,45 +252,45 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		lexer.addToken(token)
 		err = lexer.nextRune()
 	case '<':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindLessThan
 		if lexer.char == '<' {
 			token.kind = TokenKindLShift
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	case '>':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindGreaterThan
 		if lexer.char == '>' {
 			token.kind = TokenKindRShift
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	case '|':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindBinaryOr
 		if lexer.char == '|' {
 			token.kind = TokenKindLogicalOr
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	case '&':
+		token := lexer.newToken()
 		err = lexer.nextRune()
 		if err != nil { return }
-		token := lexer.newToken()
 		token.kind = TokenKindBinaryAnd
 		if lexer.char == '&' {
 			token.kind = TokenKindLogicalAnd
+			err = lexer.nextRune()
 		}
 		lexer.addToken(token)
-		err = lexer.nextRune()
 	default:
		err = file.NewError (
			lexer.file.Location(1),