Implement new tokens

I think this is all of them. The test will tell.
This commit is contained in:
Sasha Koshka 2022-08-30 01:02:49 -04:00
parent 1c2194b68a
commit 06a99ce232
1 changed file with 34 additions and 4 deletions

View File

@@ -253,14 +253,26 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
lexer.addToken(token)
case '%':
token := lexer.newToken()
token.kind = TokenKindPercent
lexer.addToken(token)
err = lexer.nextRune()
if err != nil { return }
token.kind = TokenKindPercent
if lexer.char == '=' {
token.kind = TokenKindPercentAssignment
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
case '~':
token := lexer.newToken()
token.kind = TokenKindTilde
lexer.addToken(token)
err = lexer.nextRune()
if err != nil { return }
token.kind = TokenKindTilde
if lexer.char == '=' {
token.kind = TokenKindTildeAssignment
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
case '=':
token := lexer.newToken()
token.kind = TokenKindEqualTo
@@ -275,6 +287,11 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
token.kind = TokenKindLShift
err = lexer.nextRune()
token.location.SetWidth(2)
if lexer.char == '=' {
token.kind = TokenKindLShiftAssignment
err = lexer.nextRune()
token.location.SetWidth(2)
}
} else if lexer.char == '=' {
token.kind = TokenKindLessThanEqualTo
err = lexer.nextRune()
@@ -290,6 +307,11 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
token.kind = TokenKindRShift
err = lexer.nextRune()
token.location.SetWidth(2)
if lexer.char == '=' {
token.kind = TokenKindRShiftAssignment
err = lexer.nextRune()
token.location.SetWidth(2)
}
} else if lexer.char == '=' {
token.kind = TokenKindGreaterThanEqualTo
err = lexer.nextRune()
@@ -305,6 +327,10 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
token.kind = TokenKindLogicalOr
err = lexer.nextRune()
token.location.SetWidth(2)
} else if lexer.char == '=' {
token.kind = TokenKindBinaryOrAssignment
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
case '&':
@ -316,6 +342,10 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
token.kind = TokenKindLogicalAnd
err = lexer.nextRune()
token.location.SetWidth(2)
} else if lexer.char == '=' {
token.kind = TokenKindBinaryAndAssignment
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
default: