Lexer can now tokenize

minus
decrement
return direction
separator
This commit is contained in:
Sasha Koshka 2022-08-10 14:18:28 -04:00
parent d43a2540cf
commit 9c1baf8216
2 changed files with 22 additions and 5 deletions

View File

@ -59,6 +59,7 @@ func (err Error) Error () (formattedMessage string) {
// print an arrow with a tail spanning the width of the mistake
columnCountdown := err.Location.column
for columnCountdown > 1 {
// TODO: for tabs, print out a tab instead.
formattedMessage += " "
columnCountdown --
}

View File

@ -126,13 +126,10 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
})
lexer.nextRune()
case '+':
lexer.addToken (Token {
kind: TokenKindPlus,
})
// TODO: tokenize plus begin
lexer.nextRune()
case '-':
// TODO: tokenize dash begin
lexer.nextRune()
lexer.tokenizeDashBeginning()
case '*':
lexer.addToken (Token {
kind: TokenKindAsterisk,
@ -187,6 +184,25 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
return
}
// tokenizeDashBeginning tokenizes a token that begins with a dash: a minus
// "-", a decrement "--", a return direction "->", or a separator "---". It
// assumes the lexer is currently positioned on the initial dash, and leaves
// it positioned on the first rune after the token.
func (lexer *LexingOperation) tokenizeDashBeginning () (err error) {
	token := Token { kind: TokenKindMinus }
	err = lexer.nextRune()
	if err != nil { return }

	if lexer.char == '-' {
		// two dashes make a decrement...
		token.kind = TokenKindDecrement
		err = lexer.nextRune()
		if err != nil { return }

		// ...and a third dash upgrades it to a separator. This check
		// is nested here so that "->" followed by a dash is not
		// misread as a separator.
		if lexer.char == '-' {
			token.kind = TokenKindSeparator
			err = lexer.nextRune()
			if err != nil { return }
		}
	} else if lexer.char == '>' {
		token.kind = TokenKindReturnDirection
		err = lexer.nextRune()
		if err != nil { return }
	}

	// record the token; without this the dash tokens were silently dropped.
	lexer.addToken(token)
	return
}
// addToken adds a new token to the lexer's token slice.
func (lexer *LexingOperation) addToken (token Token) {
lexer.tokens = append(lexer.tokens, token)