LexingOperation now stores the list of tokens
commit 861400b4a1
parent c042305ebe
@@ -5,14 +5,16 @@ import "github.com/sashakoshka/arf/file"
 
 // LexingOperation holds information about an ongoing lexing operataion.
 type LexingOperation struct {
 	file *file.File
 	char rune
+	tokens []Token
 }
 
 // Tokenize converts a file into a slice of tokens (lexemes).
 func Tokenize (file *file.File) (tokens []Token, err error) {
 	lexer := LexingOperation { file: file }
-	tokens, err = lexer.tokenize()
+	err = lexer.tokenize()
+	tokens = lexer.tokens
 
 	// if the lexing operation returned io.EOF, nothing went wrong so we
 	// return nil for err.
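Taken together, the additions in this hunk mean the lexer now accumulates tokens on the LexingOperation itself instead of threading a slice through tokenize's return values, and Tokenize copies the slice out once the pass finishes. Reconstructed from the lines above, the struct and the start of the public entry point read roughly as follows (a sketch, not verbatim source):

    // LexingOperation holds information about an ongoing lexing operation.
    type LexingOperation struct {
        file   *file.File
        char   rune
        tokens []Token
    }

    // Tokenize converts a file into a slice of tokens (lexemes).
    func Tokenize (file *file.File) (tokens []Token, err error) {
        lexer := LexingOperation { file: file }
        err = lexer.tokenize()
        tokens = lexer.tokens
        // ... io.EOF from tokenize() is treated as success and reported as nil
    }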
@@ -24,7 +26,7 @@ func Tokenize (file *file.File) (tokens []Token, err error) {
 
 // tokenize converts a file into a slice of tokens (lexemes). It will always
 // return a non-nil error, but if nothing went wrong it will return io.EOF.
-func (lexer *LexingOperation) tokenize () (tokens []Token, err error) {
+func (lexer *LexingOperation) tokenize () (err error) {
 	err = lexer.nextRune()
 	if err != nil { return }
 
@@ -39,6 +41,13 @@ func (lexer *LexingOperation) tokenize () (tokens []Token, err error) {
 		// TODO: tokenize multi
 	} else {
 		switch lexer.char {
+		case '\t':
+			for lexer.char == '\t' {
+				lexer.addToken (Token {
+					kind: TokenKindIndent,
+				})
+				lexer.nextRune()
+			}
 		case '"':
 			// TODO: tokenize string literal
 			lexer.nextRune()
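The new '\t' case emits one TokenKindIndent token for every consecutive tab and advances past each one with nextRune(), which suggests that a line's indentation depth is meant to be recovered later by counting adjacent indent tokens. A minimal, self-contained sketch of the same idea applied to a plain string (the helper name countLeadingTabs is illustrative and not part of this commit):

    // countLeadingTabs mirrors the tab loop above: one unit of depth
    // per leading '\t', stopping at the first non-tab rune.
    func countLeadingTabs (line string) (depth int) {
        for _, char := range line {
            if char != '\t' { break }
            depth++
        }
        return
    }

For example, countLeadingTabs("\t\tlet x") returns 2, matching the two TokenKindIndent tokens the lexer would emit for that line.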
@@ -75,6 +84,10 @@ func (lexer *LexingOperation) tokenize () (tokens []Token, err error) {
 	return
 }
 
+func (lexer *LexingOperation) addToken (token Token) {
+	lexer.tokens = append(lexer.tokens, token)
+}
+
 // nextRune advances the lexer to the next rune in the file.
 func (lexer *LexingOperation) nextRune () (err error) {
 	lexer.char, _, err = lexer.file.ReadRune()
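With addToken in place, every token produced during tokenize() is appended to lexer.tokens, and Tokenize hands that slice back to the caller. A rough sketch of the caller-side contract, written as if from inside the same package (the helper name lexFile is hypothetical):

    // lexFile demonstrates the contract after this commit: the token
    // slice comes from lexer.tokens, and io.EOF from the underlying
    // reads is reported by Tokenize as a nil error.
    func lexFile (f *file.File) ([]Token, error) {
        tokens, err := Tokenize(f)
        if err != nil {
            // any error reaching this point is a real lexing failure
            return nil, err
        }
        return tokens, nil
    }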