Replaced references to file.Error with infoerr.Error

This commit is contained in:
Sasha Koshka
2022-08-18 00:58:40 -04:00
parent d42d0c5b34
commit 39e4fbe844
9 changed files with 43 additions and 35 deletions

View File

@@ -3,6 +3,7 @@ package lexer
import "io"
import "git.tebibyte.media/sashakoshka/arf/file"
import "git.tebibyte.media/sashakoshka/arf/types"
import "git.tebibyte.media/sashakoshka/arf/infoerr"
// LexingOperation holds information about an ongoing lexing operation.
type LexingOperation struct {
@@ -34,10 +35,10 @@ func (lexer *LexingOperation) tokenize () (err error) {
err = lexer.nextRune()
if err != nil || shebangCheck[index] != lexer.char {
err = file.NewError (
err = infoerr.NewError (
lexer.file.Location(1),
"not an arf file",
file.ErrorKindError)
infoerr.ErrorKindError)
return
}
}
@@ -123,10 +124,10 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
if !previousToken.Is(TokenKindNewline) {
err = lexer.nextRune()
file.NewError (
infoerr.NewError (
lexer.file.Location(1),
"tab not used as indent",
file.ErrorKindWarn).Print()
infoerr.ErrorKindWarn).Print()
return
}
@@ -292,11 +293,11 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
}
lexer.addToken(token)
default:
err = file.NewError (
err = infoerr.NewError (
lexer.file.Location(1),
"unexpected symbol character " +
string(lexer.char),
file.ErrorKindError)
infoerr.ErrorKindError)
return
}
@@ -362,9 +363,9 @@ func (lexer *LexingOperation) skipSpaces () (err error) {
func (lexer *LexingOperation) nextRune () (err error) {
lexer.char, _, err = lexer.file.ReadRune()
if err != nil && err != io.EOF {
return file.NewError (
return infoerr.NewError (
lexer.file.Location(1),
err.Error(), file.ErrorKindError)
err.Error(), infoerr.ErrorKindError)
}
return
}

View File

@@ -1,7 +1,7 @@
package lexer
import "strconv"
import "git.tebibyte.media/sashakoshka/arf/file"
import "git.tebibyte.media/sashakoshka/arf/infoerr"
// tokenizeSymbolBeginning lexes a token that starts with a number.
func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error) {
@@ -89,10 +89,10 @@ func (lexer *LexingOperation) tokenizeNumber (
if !runeIsDigit(lexer.char, radix) { break }
if lexer.char == '.' {
if radix != 10 {
err = file.NewError (
err = infoerr.NewError (
lexer.file.Location(1),
"floats must have radix of 10",
file.ErrorKindError)
infoerr.ErrorKindError)
return
}
isFloat = true
@@ -110,10 +110,10 @@ func (lexer *LexingOperation) tokenizeNumber (
}
if err != nil {
err = file.NewError (
err = infoerr.NewError (
lexer.file.Location(1),
"could not parse number: " + err.Error(),
file.ErrorKindError)
infoerr.ErrorKindError)
return
}

View File

@@ -1,7 +1,7 @@
package lexer
import "strconv"
import "git.tebibyte.media/sashakoshka/arf/file"
import "git.tebibyte.media/sashakoshka/arf/infoerr"
// tokenizeString tokenizes a string or rune literal.
func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
@@ -42,10 +42,10 @@ func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
if isRuneLiteral {
if len(got) > 1 {
err = file.NewError (
err = infoerr.NewError (
lexer.file.Location(1),
"excess data in rune literal",
file.ErrorKindError)
infoerr.ErrorKindError)
return
}
@@ -98,10 +98,10 @@ func (lexer *LexingOperation) getEscapeSequence () (result rune, err error) {
}
if len(number) < 3 {
err = file.NewError (
err = infoerr.NewError (
lexer.file.Location(1),
"octal escape sequence too short",
file.ErrorKindError)
infoerr.ErrorKindError)
return
}
@@ -132,20 +132,20 @@ func (lexer *LexingOperation) getEscapeSequence () (result rune, err error) {
}
if len(number) < want {
err = file.NewError (
err = infoerr.NewError (
lexer.file.Location(1),
"hex escape sequence too short ",
file.ErrorKindError)
infoerr.ErrorKindError)
return
}
parsedNumber, _ := strconv.ParseInt(number, 16, want * 4)
result = rune(parsedNumber)
} else {
err = file.NewError (
err = infoerr.NewError (
lexer.file.Location(1),
"unknown escape character " +
string(lexer.char), file.ErrorKindError)
string(lexer.char), infoerr.ErrorKindError)
return
}

View File

@@ -2,6 +2,7 @@ package lexer
import "fmt"
import "git.tebibyte.media/sashakoshka/arf/file"
import "git.tebibyte.media/sashakoshka/arf/infoerr"
// TokenKind is an enum representing what role a token has.
type TokenKind int
@@ -89,8 +90,13 @@ func (token Token) Location () (location file.Location) {
}
// NewError creates a new error at this token's location.
func (token Token) NewError (message string, kind file.ErrorKind) (err file.Error) {
return token.location.NewError(message, kind)
func (token Token) NewError (
message string,
kind infoerr.ErrorKind,
) (
err infoerr.Error,
) {
return infoerr.NewError(token.location, message, kind)
}
// Describe generates a textual description of the token to be used in debug