Lexer passes all width tests

This commit is contained in:
Sasha Koshka 2022-08-18 11:35:48 -04:00
parent 120976a0f3
commit 15eb96e8ac
1 changed file with 11 additions and 0 deletions

View File

@@ -93,6 +93,7 @@ func (lexer *LexingOperation) tokenizeAlphaBeginning () (err error) {
}
token.value = got
token.location.SetWidth(len(got))
if len(got) == 2 {
firstValid := got[0] == 'n' || got[0] == 'r' || got[0] == 'w'
@@ -143,6 +144,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
}
token.value = indentLevel
token.location.SetWidth(indentLevel)
lexer.addToken(token)
case '\n':
// line break
@@ -183,6 +185,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
if lexer.char == '.' {
token.kind = TokenKindElipsis
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
case ',':
@@ -218,6 +221,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
if lexer.char == '+' {
token.kind = TokenKindIncrement
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
case '-':
@@ -260,6 +264,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
if lexer.char == '<' {
token.kind = TokenKindLShift
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
case '>':
@@ -270,6 +275,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
if lexer.char == '>' {
token.kind = TokenKindRShift
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
case '|':
@@ -280,6 +286,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
if lexer.char == '|' {
token.kind = TokenKindLogicalOr
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
case '&':
@@ -290,6 +297,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
if lexer.char == '&' {
token.kind = TokenKindLogicalAnd
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
default:
@@ -311,6 +319,7 @@ func (lexer *LexingOperation) tokenizeDashBeginning () (err error) {
if lexer.char == '-' {
token := lexer.newToken()
token.kind = TokenKindDecrement
token.location.SetWidth(2)
err = lexer.nextRune()
if err != nil { return }
@@ -318,11 +327,13 @@ func (lexer *LexingOperation) tokenizeDashBeginning () (err error) {
if lexer.char == '-' {
token.kind = TokenKindSeparator
lexer.nextRune()
token.location.SetWidth(3)
}
lexer.addToken(token)
} else if lexer.char == '>' {
token := lexer.newToken()
token.kind = TokenKindReturnDirection
token.location.SetWidth(2)
err = lexer.nextRune()
if err != nil { return }