All basic symbol tokens are now parsed

Sasha Koshka 2022-08-10 01:03:59 -04:00
parent a0ce033c27
commit 0d2d10fe04
1 changed file with 35 additions and 12 deletions
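The diff below emits tokens of several kinds (TokenKindIndent, TokenKindNewline, TokenKindPlus, and so on). Those kinds are declared elsewhere in the repository; as a reading aid, here is a minimal sketch of what such a declaration could look like, assuming a plain iota enumeration. The kind names come from the diff, but the package name and the declaration form are assumptions, not the project's actual code.

// Sketch only: the kind names appear in the diff below, but the real
// declarations live elsewhere in the repository and may differ from
// this assumed iota enumeration (the package name is assumed too).
package lexer

type TokenKind int

const (
	TokenKindNewline TokenKind = iota
	TokenKindIndent
	TokenKindPlus
	TokenKindAsterisk
	TokenKindSlash
	TokenKindAt
	TokenKindExclamation
	TokenKindPercent
	TokenKindTilde
)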


@@ -51,16 +51,28 @@ func (lexer *LexingOperation) tokenize () (err error) {
 }
 
 func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
-	// TODO: ignore comments
 	switch lexer.char {
+	case '#':
+		// comment
+		for lexer.char != '\n' {
+			lexer.nextRune()
+		}
 	case '\t':
+		// indent level
+		// TODO: throw error if tab is not at line beginning, or after
+		// other tab
 		for lexer.char == '\t' {
 			lexer.addToken (Token {
 				kind: TokenKindIndent,
 			})
 			lexer.nextRune()
 		}
-	// TODO: newline
+	case '\n':
+		// line break
+		lexer.addToken (Token {
+			kind: TokenKindNewline,
+		})
+		lexer.nextRune()
 	case '"':
 		// TODO: tokenize string literal
 		lexer.nextRune()
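This first hunk resolves the two TODOs left in this part of the switch: '#' now skips a comment up to, but not including, the line break, and '\n' emits a newline token, so a commented line still ends with exactly one newline token. The standalone toy below mirrors that skip-then-tokenize order over a plain string; it is an illustration under assumed simplifications, not the lexer shown in the diff.

// Standalone illustration, not the diff's lexer: the same
// comment-skipping and newline-token pattern, applied to a string.
package main

import "fmt"

func lexKinds (src string) (kinds []string) {
	for index := 0; index < len(src); index++ {
		switch src[index] {
		case '#':
			// comment: skip up to the newline, leaving the
			// newline itself for the '\n' case below
			for index < len(src) && src[index] != '\n' {
				index++
			}
			// step back one so the outer loop's increment
			// lands on the newline
			index--
		case '\t':
			kinds = append(kinds, "indent")
		case '\n':
			kinds = append(kinds, "newline")
		}
	}
	return
}

func main () {
	// a tab-indented comment line: the comment body produces no
	// tokens, but the indent and the line break do
	fmt.Println(lexKinds("\t# a comment\n")) // [indent newline]
}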
@@ -98,28 +110,42 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		})
 		lexer.nextRune()
 	case '+':
-		// TODO: tokenize plus
+		lexer.addToken (Token {
+			kind: TokenKindPlus,
+		})
 		lexer.nextRune()
 	case '-':
 		// TODO: tokenize dash begin
 		lexer.nextRune()
 	case '*':
-		// TODO: tokenize asterisk
+		lexer.addToken (Token {
+			kind: TokenKindAsterisk,
+		})
 		lexer.nextRune()
 	case '/':
-		// TODO: tokenize slash
+		lexer.addToken (Token {
+			kind: TokenKindSlash,
+		})
 		lexer.nextRune()
 	case '@':
-		// TODO: tokenize @
+		lexer.addToken (Token {
+			kind: TokenKindAt,
+		})
 		lexer.nextRune()
 	case '!':
-		// TODO: tokenize exclamation mark
+		lexer.addToken (Token {
+			kind: TokenKindExclamation,
+		})
 		lexer.nextRune()
 	case '%':
-		// TODO: tokenize percent
+		lexer.addToken (Token {
+			kind: TokenKindPercent,
+		})
 		lexer.nextRune()
 	case '~':
-		// TODO: tokenize tilde
+		lexer.addToken (Token {
+			kind: TokenKindTilde,
+		})
 		lexer.nextRune()
 	case '<':
 		// TODO: tokenize less than begin
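Every case filled in by this hunk does the same two things: add one token of a fixed kind, then advance one rune. Purely as a design sketch, and not what the commit does, those seven cases could also be driven by a lookup table; the snippet below assumes the TokenKind declarations sketched before the diff and is otherwise hypothetical.

// Design sketch only, not the commit's code: a table-driven form of
// the seven identical single-rune cases above, reusing the assumed
// TokenKind declarations from the sketch before the diff.
var singleRuneTokens = map[rune]TokenKind {
	'+': TokenKindPlus,
	'*': TokenKindAsterisk,
	'/': TokenKindSlash,
	'@': TokenKindAt,
	'!': TokenKindExclamation,
	'%': TokenKindPercent,
	'~': TokenKindTilde,
}

// kindOfSymbol reports which single-rune token kind, if any, a rune
// maps to on its own.
func kindOfSymbol (char rune) (kind TokenKind, found bool) {
	kind, found = singleRuneTokens[char]
	return
}

The switch form used in the commit keeps these cases next to the multi-rune ones that are still marked TODO ('-', '<', '&'), which a lookup table would not handle as naturally.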
@@ -133,9 +159,6 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 	case '&':
 		// TODO: tokenize and begin
 		lexer.nextRune()
-
-	// TODO: add more for things like math symbols, return
-	// direction operators, indentation, etc
 	default:
 		err = file.NewError (
 			lexer.file.Location(), 1,