Lexer no longer hangs when encountering letters

This commit is contained in:
Sasha Koshka 2022-08-10 13:59:09 -04:00
parent 9f8b48161a
commit 314c045e65

View File

@ -1,6 +1,7 @@
package lexer package lexer
import "io" import "io"
import "fmt"
import "github.com/sashakoshka/arf/file" import "github.com/sashakoshka/arf/file"
// LexingOperation holds information about an ongoing lexing operation. // LexingOperation holds information about an ongoing lexing operation.
@ -37,20 +38,24 @@ func (lexer *LexingOperation) tokenize () (err error) {
if number { if number {
// TODO: tokenize number begin // TODO: tokenize number begin
lexer.nextRune()
} else if lowercase || uppercase { } else if lowercase || uppercase {
// TODO: tokenize alpha begin // TODO: tokenize alpha begin
lexer.nextRune()
} else { } else {
err = lexer.tokenizeSymbolBeginning() err = lexer.tokenizeSymbolBeginning()
if err != nil { return err } if err != nil { return }
} }
// TODO: skip whitespace err = lexer.skipSpaces()
if err != nil { return }
} }
return return
} }
func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) { func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
fmt.Println(string(lexer.char))
switch lexer.char { switch lexer.char {
case '#': case '#':
// comment // comment
@ -182,10 +187,21 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
return return
} }
// addToken adds a new token to the lexer's token slice.
func (lexer *LexingOperation) addToken (token Token) { func (lexer *LexingOperation) addToken (token Token) {
lexer.tokens = append(lexer.tokens, token) lexer.tokens = append(lexer.tokens, token)
} }
// skipSpaces consumes a run of consecutive space characters, advancing the
// lexer past them. Only ' ' is skipped — tabs and newlines are significant
// to the lexer and are left in place. Returns any error from nextRune.
func (lexer *LexingOperation) skipSpaces () (err error) {
	for lexer.char == ' ' {
		if err = lexer.nextRune(); err != nil {
			return
		}
	}
	return
}
// nextRune advances the lexer to the next rune in the file. // nextRune advances the lexer to the next rune in the file.
func (lexer *LexingOperation) nextRune () (err error) { func (lexer *LexingOperation) nextRune () (err error) {
lexer.char, _, err = lexer.file.ReadRune() lexer.char, _, err = lexer.file.ReadRune()