Added lexing of various symbol tokens

This commit is contained in:
Sasha Koshka 2022-08-09 22:18:12 -04:00
parent 861400b4a1
commit 64de897de5
1 changed files with 61 additions and 38 deletions

View File

@ -40,42 +40,8 @@ func (lexer *LexingOperation) tokenize () (err error) {
} else if lowercase || uppercase { } else if lowercase || uppercase {
// TODO: tokenize multi // TODO: tokenize multi
} else { } else {
switch lexer.char { err = lexer.tokenizeSymbolBeginning()
case '\t': if err != nil { return err }
for lexer.char == '\t' {
lexer.addToken (Token {
kind: TokenKindIndent,
})
lexer.nextRune()
}
case '"':
// TODO: tokenize string literal
lexer.nextRune()
case '\'':
// TODO: tokenize rune literal
lexer.nextRune()
case ':':
// TODO: colon token
case '.':
// TODO: dot token
case '[':
// TODO: left bracket token
case ']':
// TODO: right bracket token
case '{':
// TODO: left brace token
case '}':
// TODO: right brace token
// TODO: add more for things like math symbols, return
// direction operators, indentation, etc
default:
err = file.NewError (
lexer.file.Location(), 1,
"unexpected character " +
string(lexer.char),
file.ErrorKindError)
return
}
} }
// TODO: skip whitespace // TODO: skip whitespace
@ -84,6 +50,65 @@ func (lexer *LexingOperation) tokenize () (err error) {
return return
} }
// tokenizeSymbolBeginning lexes a token that begins with a symbol rune:
// indentation tabs, string/rune literal openers, punctuation, and
// delimiters. It reports an error for runes that do not begin any known
// token, and propagates any error encountered while advancing the input.
func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
	switch lexer.char {
	case '\t':
		// a run of tabs produces one indent token per tab
		for lexer.char == '\t' {
			lexer.addToken (Token {
				kind: TokenKindIndent,
			})
			// propagate read errors; silently dropping one here
			// could leave this loop spinning on a stale rune
			err = lexer.nextRune()
			if err != nil { return }
		}
	case '"':
		// TODO: tokenize string literal
		err = lexer.nextRune()
	case '\'':
		// TODO: tokenize rune literal
		err = lexer.nextRune()
	case ':':
		lexer.addToken (Token {
			kind: TokenKindColon,
		})
		err = lexer.nextRune()
	case '.':
		lexer.addToken (Token {
			kind: TokenKindDot,
		})
		err = lexer.nextRune()
	case '[':
		lexer.addToken (Token {
			kind: TokenKindLBracket,
		})
		err = lexer.nextRune()
	case ']':
		lexer.addToken (Token {
			kind: TokenKindRBracket,
		})
		err = lexer.nextRune()
	case '{':
		lexer.addToken (Token {
			kind: TokenKindLBrace,
		})
		err = lexer.nextRune()
	case '}':
		lexer.addToken (Token {
			kind: TokenKindRBrace,
		})
		err = lexer.nextRune()
	// TODO: add more for things like math symbols, return
	// direction operators, indentation, etc
	default:
		err = file.NewError (
			lexer.file.Location(), 1,
			"unexpected character " +
			string(lexer.char),
			file.ErrorKindError)
		return
	}
	return
}
func (lexer *LexingOperation) addToken (token Token) { func (lexer *LexingOperation) addToken (token Token) {
lexer.tokens = append(lexer.tokens, token) lexer.tokens = append(lexer.tokens, token)
} }
@ -98,5 +123,3 @@ func (lexer *LexingOperation) nextRune () (err error) {
} }
return return
} }
//