Added a ton more token kinds

This commit is contained in:
Sasha Koshka 2022-08-10 00:48:18 -04:00
parent 64de897de5
commit a0ce033c27
2 changed files with 61 additions and 2 deletions

View File

@@ -36,9 +36,9 @@ func (lexer *LexingOperation) tokenize () (err error) {
 	number := lexer.char >= '0' && lexer.char <= '9'
 	if number {
-		// TODO: tokenize number
+		// TODO: tokenize number begin
 	} else if lowercase || uppercase {
-		// TODO: tokenize multi
+		// TODO: tokenize alpha begin
 	} else {
 		err = lexer.tokenizeSymbolBeginning()
 		if err != nil { return err }