Replaced references to file.Error with infoerr.Error

Sasha Koshka 2022-08-18 00:58:40 -04:00
parent d42d0c5b34
commit 39e4fbe844
9 changed files with 43 additions and 35 deletions
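
Taken together, the call sites in this diff pin down the shape of the new infoerr package: a NewError constructor taking a location, a message, and a kind (in that order), ErrorKindError and ErrorKindWarn severity constants, an Error type that satisfies Go's error interface, and a Print method for non-fatal reports. The following is only a sketch reconstructed from those call sites, not the package's actual source; the field names, constant values, and Print formatting are assumptions.

package infoerr

import "fmt"
import "git.tebibyte.media/sashakoshka/arf/file"

// ErrorKind describes the severity of an Error.
type ErrorKind int

const (
	ErrorKindError ErrorKind = iota
	ErrorKindWarn
)

// Error carries a message and severity tied to a location in a source file.
type Error struct {
	location file.Location
	message  string
	kind     ErrorKind
}

// NewError creates a new Error at the given location.
func NewError (
	location file.Location,
	message  string,
	kind     ErrorKind,
) (
	err Error,
) {
	return Error {
		location: location,
		message:  message,
		kind:     kind,
	}
}

// Error satisfies the built-in error interface.
func (err Error) Error () (message string) {
	return err.message
}

// Print writes a human-readable report to standard output. The real
// implementation presumably includes the location and severity; the
// formatting here is a placeholder.
func (err Error) Print () {
	fmt.Println(err.Error())
}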

View File

@@ -3,6 +3,7 @@ package lexer
 import "io"
 import "git.tebibyte.media/sashakoshka/arf/file"
 import "git.tebibyte.media/sashakoshka/arf/types"
+import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
 // LexingOperation holds information about an ongoing lexing operation.
 type LexingOperation struct {
@@ -34,10 +35,10 @@ func (lexer *LexingOperation) tokenize () (err error) {
 		err = lexer.nextRune()
 		if err != nil || shebangCheck[index] != lexer.char {
-			err = file.NewError (
+			err = infoerr.NewError (
 				lexer.file.Location(1),
 				"not an arf file",
-				file.ErrorKindError)
+				infoerr.ErrorKindError)
 			return
 		}
 	}
@@ -123,10 +124,10 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		if !previousToken.Is(TokenKindNewline) {
 			err = lexer.nextRune()
 
-			file.NewError (
+			infoerr.NewError (
 				lexer.file.Location(1),
 				"tab not used as indent",
-				file.ErrorKindWarn).Print()
+				infoerr.ErrorKindWarn).Print()
 			return
 		}
@@ -292,11 +293,11 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		}
 		lexer.addToken(token)
 
 	default:
-		err = file.NewError (
+		err = infoerr.NewError (
 			lexer.file.Location(1),
 			"unexpected symbol character " +
 			string(lexer.char),
-			file.ErrorKindError)
+			infoerr.ErrorKindError)
 		return
 	}
@@ -362,9 +363,9 @@ func (lexer *LexingOperation) skipSpaces () (err error) {
 func (lexer *LexingOperation) nextRune () (err error) {
 	lexer.char, _, err = lexer.file.ReadRune()
 	if err != nil && err != io.EOF {
-		return file.NewError (
+		return infoerr.NewError (
 			lexer.file.Location(1),
-			err.Error(), file.ErrorKindError)
+			err.Error(), infoerr.ErrorKindError)
 	}
 	return
 }
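
The hunks above show both ways the lexer consumes the new package: fatal problems are wrapped in an error value and handed back through the function's named err return, while the "tab not used as indent" warning builds the error and calls Print() on it immediately so tokenizing continues. A condensed sketch of the two patterns follows; the method names reportFatal and reportWarning are illustrative only and do not exist in the commit.

// reportFatal constructs an error at the current position and returns it.
func (lexer *LexingOperation) reportFatal (message string) (err error) {
	err = infoerr.NewError (
		lexer.file.Location(1),
		message,
		infoerr.ErrorKindError)
	return
}

// reportWarning constructs a warning at the current position, prints it,
// and lets lexing carry on.
func (lexer *LexingOperation) reportWarning (message string) {
	infoerr.NewError (
		lexer.file.Location(1),
		message,
		infoerr.ErrorKindWarn).Print()
}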

View File

@@ -1,7 +1,7 @@
 package lexer
 
 import "strconv"
-import "git.tebibyte.media/sashakoshka/arf/file"
+import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
 // tokenizeNumberBeginning lexes a token that starts with a number.
 func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error) {
@@ -89,10 +89,10 @@ func (lexer *LexingOperation) tokenizeNumber (
 		if !runeIsDigit(lexer.char, radix) { break }
 		if lexer.char == '.' {
 			if radix != 10 {
-				err = file.NewError (
+				err = infoerr.NewError (
 					lexer.file.Location(1),
 					"floats must have radix of 10",
-					file.ErrorKindError)
+					infoerr.ErrorKindError)
 				return
 			}
 			isFloat = true
@@ -110,10 +110,10 @@ func (lexer *LexingOperation) tokenizeNumber (
 	}
 
 	if err != nil {
-		err = file.NewError (
+		err = infoerr.NewError (
 			lexer.file.Location(1),
 			"could not parse number: " + err.Error(),
-			file.ErrorKindError)
+			infoerr.ErrorKindError)
 		return
 	}

View File

@@ -1,7 +1,7 @@
 package lexer
 
 import "strconv"
-import "git.tebibyte.media/sashakoshka/arf/file"
+import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
 // tokenizeString tokenizes a string or rune literal.
 func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
@@ -42,10 +42,10 @@ func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
 	if isRuneLiteral {
 		if len(got) > 1 {
-			err = file.NewError (
+			err = infoerr.NewError (
 				lexer.file.Location(1),
 				"excess data in rune literal",
-				file.ErrorKindError)
+				infoerr.ErrorKindError)
 			return
 		}
@@ -98,10 +98,10 @@ func (lexer *LexingOperation) getEscapeSequence () (result rune, err error) {
 		}
 
 		if len(number) < 3 {
-			err = file.NewError (
+			err = infoerr.NewError (
 				lexer.file.Location(1),
 				"octal escape sequence too short",
-				file.ErrorKindError)
+				infoerr.ErrorKindError)
 			return
 		}
@@ -132,20 +132,20 @@ func (lexer *LexingOperation) getEscapeSequence () (result rune, err error) {
 		}
 
 		if len(number) < want {
-			err = file.NewError (
+			err = infoerr.NewError (
 				lexer.file.Location(1),
 				"hex escape sequence too short ",
-				file.ErrorKindError)
+				infoerr.ErrorKindError)
 			return
 		}
 
 		parsedNumber, _ := strconv.ParseInt(number, 16, want * 4)
 		result = rune(parsedNumber)
 	} else {
-		err = file.NewError (
+		err = infoerr.NewError (
 			lexer.file.Location(1),
 			"unknown escape character " +
-			string(lexer.char), file.ErrorKindError)
+			string(lexer.char), infoerr.ErrorKindError)
 		return
 	}

View File

@@ -2,6 +2,7 @@ package lexer
 
 import "fmt"
 import "git.tebibyte.media/sashakoshka/arf/file"
+import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
 // TokenKind is an enum representing what role a token has.
 type TokenKind int
@@ -89,8 +90,13 @@ func (token Token) Location () (location file.Location) {
 }
 
 // NewError creates a new error at this token's location.
-func (token Token) NewError (message string, kind file.ErrorKind) (err file.Error) {
-	return token.location.NewError(message, kind)
+func (token Token) NewError (
+	message string,
+	kind infoerr.ErrorKind,
+) (
+	err infoerr.Error,
+) {
+	return infoerr.NewError(token.location, message, kind)
 }
 
 // Describe generates a textual description of the token to be used in debug
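
With the reshaped Token.NewError above, parser code no longer touches the file package for error reporting: it passes an infoerr.ErrorKind and gets back an infoerr.Error already anchored to the token's location, which is exactly the pattern in the parser hunks below. A minimal usage sketch (the surrounding function rejectSection is illustrative, not part of the commit):

// rejectSection reports an unrecognized section type at the current token.
func (parser *ParsingOperation) rejectSection (sectionType string) (err error) {
	err = parser.token.NewError (
		"unknown section type \"" + sectionType + "\"",
		infoerr.ErrorKindError)
	return
}

Keeping the wrapper on Token means call sites never have to look up a location themselves, which is why the parser hunks below only swap the error kind constant.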

View File

@@ -1,7 +1,7 @@
 package parser
 
-import "git.tebibyte.media/sashakoshka/arf/file"
 import "git.tebibyte.media/sashakoshka/arf/lexer"
+import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
 var validArgumentStartTokens = []lexer.TokenKind {
 	lexer.TokenKindName,
@@ -37,7 +37,7 @@ func (parser *ParsingOperation) parseArgument () (argument Argument, err error)
 		err = parser.token.NewError (
 			"cannot use member selection in " +
 			"a variable definition",
-			file.ErrorKindError)
+			infoerr.ErrorKindError)
 		return
 	}

View File

@@ -1,7 +1,7 @@
 package parser
 
-import "git.tebibyte.media/sashakoshka/arf/file"
 import "git.tebibyte.media/sashakoshka/arf/lexer"
+import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
 // parse body parses the body of an arf file, after the metadata header.
 func (parser *ParsingOperation) parseBody () (err error) {
@@ -27,7 +27,7 @@ func (parser *ParsingOperation) parseBody () (err error) {
 		default:
 			err = parser.token.NewError (
 				"unknown section type \"" + sectionType + "\"",
-				file.ErrorKindError)
+				infoerr.ErrorKindError)
 			return
 		}
 	}

View File

@@ -1,8 +1,8 @@
 package parser
 
-import "git.tebibyte.media/sashakoshka/arf/file"
 import "git.tebibyte.media/sashakoshka/arf/types"
 import "git.tebibyte.media/sashakoshka/arf/lexer"
+import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
 // parseData parses a data section.
 func (parser *ParsingOperation) parseDataSection () (
@@ -132,7 +132,7 @@ func (parser *ParsingOperation) parseObjectInitializationValues () (
 		err = parser.token.NewError (
 			"duplicate member \"" + name + "\" in object " +
 			"member initialization",
-			file.ErrorKindError)
+			infoerr.ErrorKindError)
 		return
 	}
@@ -273,7 +273,7 @@ func (parser *ParsingOperation) parseType () (what Type, err error) {
 		default:
 			err = parser.token.NewError (
 				"unknown type qualifier \"" + qualifier + "\"",
-				file.ErrorKindError)
+				infoerr.ErrorKindError)
 			return
 		}

View File

@@ -1,7 +1,7 @@
 package parser
 
-import "git.tebibyte.media/sashakoshka/arf/file"
 import "git.tebibyte.media/sashakoshka/arf/lexer"
+import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
 // parseMeta parses the metadata header at the top of an arf file.
 func (parser *ParsingOperation) parseMeta () (err error) {
@@ -35,7 +35,7 @@ func (parser *ParsingOperation) parseMeta () (err error) {
 		default:
 			parser.token.NewError (
 				"unrecognized metadata field: " + field,
-				file.ErrorKindError)
+				infoerr.ErrorKindError)
 		}
 
 		err = parser.nextToken(lexer.TokenKindNewline)

View File

@@ -5,6 +5,7 @@ import "os"
 import "path/filepath"
 import "git.tebibyte.media/sashakoshka/arf/file"
 import "git.tebibyte.media/sashakoshka/arf/lexer"
+import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
 // ParsingOperation holds information about an ongoing parsing operation.
 type ParsingOperation struct {
@@ -96,9 +97,9 @@ func (parser *ParsingOperation) expect (allowed ...lexer.TokenKind) (err error)
 		message += allowedItem.Describe()
 	}
 
-	err = file.NewError (
+	err = infoerr.NewError (
 		parser.token.Location(),
-		message, file.ErrorKindError)
+		message, infoerr.ErrorKindError)
 	return
 }