Added lexing various symbol tokens

This commit is contained in:
Sasha Koshka 2022-08-09 22:18:12 -04:00
parent 861400b4a1
commit 64de897de5
1 changed file with 61 additions and 38 deletions

View File

@ -40,42 +40,8 @@ func (lexer *LexingOperation) tokenize () (err error) {
} else if lowercase || uppercase {
// TODO: tokenize multi
} else {
switch lexer.char {
case '\t':
for lexer.char == '\t' {
lexer.addToken (Token {
kind: TokenKindIndent,
})
lexer.nextRune()
}
case '"':
// TODO: tokenize string literal
lexer.nextRune()
case '\'':
// TODO: tokenize rune literal
lexer.nextRune()
case ':':
// TODO: colon token
case '.':
// TODO: dot token
case '[':
// TODO: left bracket token
case ']':
// TODO: right bracket token
case '{':
// TODO: left brace token
case '}':
// TODO: right brace token
// TODO: add more for things like math symbols, return
// direction operators, indentation, etc
default:
err = file.NewError (
lexer.file.Location(), 1,
"unexpected character " +
string(lexer.char),
file.ErrorKindError)
return
}
err = lexer.tokenizeSymbolBeginning()
if err != nil { return err }
}
// TODO: skip whitespace
@ -84,6 +50,65 @@ func (lexer *LexingOperation) tokenize () (err error) {
return
}
// tokenizeSymbolBeginning lexes a token that starts with a symbol rune,
// appending the resulting token(s) and advancing the lexer past them.
// An unrecognized rune produces a file.ErrorKindError at the current
// location.
func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
	switch lexer.char {
	case '\t':
		// a run of tabs produces one indent token per tab
		for lexer.char == '\t' {
			lexer.addToken (Token {
				kind: TokenKindIndent,
			})
			// propagate read errors; previously they were silently
			// dropped, which could loop forever here because
			// lexer.char would never advance past '\t'
			err = lexer.nextRune()
			if err != nil { return }
		}
	case '"':
		// TODO: tokenize string literal
		err = lexer.nextRune()
		if err != nil { return }
	case '\'':
		// TODO: tokenize rune literal
		err = lexer.nextRune()
		if err != nil { return }
	case ':':
		lexer.addToken (Token {
			kind: TokenKindColon,
		})
		err = lexer.nextRune()
		if err != nil { return }
	case '.':
		lexer.addToken (Token {
			kind: TokenKindDot,
		})
		err = lexer.nextRune()
		if err != nil { return }
	case '[':
		lexer.addToken (Token {
			kind: TokenKindLBracket,
		})
		err = lexer.nextRune()
		if err != nil { return }
	case ']':
		lexer.addToken (Token {
			kind: TokenKindRBracket,
		})
		err = lexer.nextRune()
		if err != nil { return }
	case '{':
		lexer.addToken (Token {
			kind: TokenKindLBrace,
		})
		err = lexer.nextRune()
		if err != nil { return }
	case '}':
		lexer.addToken (Token {
			kind: TokenKindRBrace,
		})
		err = lexer.nextRune()
		if err != nil { return }
	// TODO: add more for things like math symbols, return
	// direction operators, indentation, etc
	default:
		err = file.NewError (
			lexer.file.Location(), 1,
			"unexpected character " +
			string(lexer.char),
			file.ErrorKindError)
		return
	}
	return
}
// addToken appends token to the lexer's output token slice.
func (lexer *LexingOperation) addToken (token Token) {
	lexer.tokens = append(lexer.tokens, token)
}
@ -98,5 +123,3 @@ func (lexer *LexingOperation) nextRune () (err error) {
}
return
}
//