Privated lexingOperation

Sasha Koshka 2022-10-12 00:00:34 -04:00
parent b92a3dcba3
commit 1196bb3801
3 changed files with 17 additions and 15 deletions

View File

@@ -5,8 +5,8 @@ import "git.tebibyte.media/arf/arf/file"
 import "git.tebibyte.media/arf/arf/types"
 import "git.tebibyte.media/arf/arf/infoerr"
 
-// LexingOperation holds information about an ongoing lexing operataion.
-type LexingOperation struct {
+// lexingOperation holds information about an ongoing lexing operataion.
+type lexingOperation struct {
 	file *file.File
 	char rune
 	tokens []Token
@@ -14,7 +14,7 @@ type LexingOperation struct {
 
 // Tokenize converts a file into a slice of tokens (lexemes).
 func Tokenize (file *file.File) (tokens []Token, err error) {
-	lexer := LexingOperation { file: file }
+	lexer := lexingOperation { file: file }
 	err = lexer.tokenize()
 	tokens = lexer.tokens
@@ -28,7 +28,7 @@ func Tokenize (file *file.File) (tokens []Token, err error) {
 
 // tokenize converts a file into a slice of tokens (lexemes). It will always
 // return a non-nil error, but if nothing went wrong it will return io.EOF.
-func (lexer *LexingOperation) tokenize () (err error) {
+func (lexer *lexingOperation) tokenize () (err error) {
 	// check to see if the beginning of the file says :arf
 	var shebangCheck = []rune(":arf\n")
 	for index := 0; index < 5; index ++ {
@@ -66,6 +66,8 @@ func (lexer *LexingOperation) tokenize () (err error) {
 		if err != nil { return }
 	}
 
+	// TODO: figure out why this is here and what its proper place is
+	// because it is apparently unreachable
 	if lexer.tokens[len(lexer.tokens) - 1].kind != TokenKindNewline {
 		token := lexer.newToken()
 		token.kind = TokenKindNewline
@@ -75,7 +77,7 @@ func (lexer *LexingOperation) tokenize () (err error) {
 	return
 }
 
-func (lexer *LexingOperation) tokenizeAlphaBeginning () (err error) {
+func (lexer *lexingOperation) tokenizeAlphaBeginning () (err error) {
 	token := lexer.newToken()
 	token.kind = TokenKindName
@@ -109,7 +111,7 @@ func (lexer *LexingOperation) tokenizeAlphaBeginning () (err error) {
 	return
 }
 
-func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
+func (lexer *lexingOperation) tokenizeSymbolBeginning () (err error) {
 	switch lexer.char {
 	case '#':
 		// comment
@@ -385,7 +387,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 	return
 }
 
-func (lexer *LexingOperation) tokenizeDashBeginning () (err error) {
+func (lexer *lexingOperation) tokenizeDashBeginning () (err error) {
 	token := lexer.newToken()
 	err = lexer.nextRune()
 	if err != nil { return }
@@ -422,17 +424,17 @@ func (lexer *LexingOperation) tokenizeDashBeginning () (err error) {
 }
 
 // newToken creates a new token from the lexer's current position in the file.
-func (lexer *LexingOperation) newToken () (token Token) {
+func (lexer *lexingOperation) newToken () (token Token) {
 	return Token { location: lexer.file.Location(1) }
 }
 
 // addToken adds a new token to the lexer's token slice.
-func (lexer *LexingOperation) addToken (token Token) {
+func (lexer *lexingOperation) addToken (token Token) {
 	lexer.tokens = append(lexer.tokens, token)
 }
 
 // skipSpaces skips all space characters (not tabs or newlines)
-func (lexer *LexingOperation) skipSpaces () (err error) {
+func (lexer *lexingOperation) skipSpaces () (err error) {
 	for lexer.char == ' ' {
 		err = lexer.nextRune()
 		if err != nil { return }
@@ -442,7 +444,7 @@ func (lexer *LexingOperation) skipSpaces () (err error) {
 }
 
 // nextRune advances the lexer to the next rune in the file.
-func (lexer *LexingOperation) nextRune () (err error) {
+func (lexer *lexingOperation) nextRune () (err error) {
 	lexer.char, _, err = lexer.file.ReadRune()
 	if err != nil && err != io.EOF {
 		return infoerr.NewError (
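
After this change, Tokenize remains the package's only exported entry point, and lexingOperation can no longer be constructed from outside the package. A minimal sketch of what calling code might look like afterwards; the lexer import path, the file.Open constructor, and the io.EOF handling are illustrative assumptions, not shown in this diff:

package main

import "fmt"
import "io"

import "git.tebibyte.media/arf/arf/file"
import "git.tebibyte.media/arf/arf/lexer"

func main () {
	// Hypothetical constructor; the diff only shows that Tokenize
	// takes a *file.File.
	sourceFile, err := file.Open("main.arf")
	if err != nil { panic(err) }

	// Tokenize is still exported; the lexingOperation it creates
	// internally is now private to the lexer package.
	tokens, err := lexer.Tokenize(sourceFile)

	// The internal tokenize reports success as io.EOF; whether the
	// Tokenize wrapper filters that out is not shown here, so treat
	// io.EOF as success.
	if err != nil && err != io.EOF { panic(err) }

	fmt.Println(tokens)
}

Since Go determines visibility solely by the identifier's leading capitalization, lowercasing the struct name narrows the API surface without requiring any change from external callers that already go through Tokenize.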

View File

@@ -4,7 +4,7 @@ import "strconv"
 import "git.tebibyte.media/arf/arf/infoerr"
 
 // tokenizeSymbolBeginning lexes a token that starts with a number.
-func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error) {
+func (lexer *lexingOperation) tokenizeNumberBeginning (negative bool) (err error) {
 	var intNumber uint64
 	var floatNumber float64
 	var isFloat bool
@@ -107,7 +107,7 @@ func runeIsDigit (char rune, radix uint64) (isDigit bool) {
 }
 
 // tokenizeNumber reads and tokenizes a number with the specified radix.
-func (lexer *LexingOperation) tokenizeNumber (
+func (lexer *lexingOperation) tokenizeNumber (
 	radix uint64,
 ) (
 	intNumber uint64,

View File

@@ -4,7 +4,7 @@ import "strconv"
 import "git.tebibyte.media/arf/arf/infoerr"
 
 // tokenizeString tokenizes a string or rune literal.
-func (lexer *LexingOperation) tokenizeString () (err error) {
+func (lexer *lexingOperation) tokenizeString () (err error) {
 	err = lexer.nextRune()
 	if err != nil { return }
@@ -62,7 +62,7 @@ var escapeSequenceMap = map[rune] rune {
 }
 
 // getEscapeSequence reads an escape sequence in a string or rune literal.
-func (lexer *LexingOperation) getEscapeSequence () (
+func (lexer *lexingOperation) getEscapeSequence () (
 	result rune,
 	amountRead int,
 	err error,