Added metadata parser

Sasha Koshka 2022-08-12 16:22:51 -05:00
parent 856d5763d3
commit f23c3a234a
2 changed files with 42 additions and 5 deletions

View File

@ -1,5 +1,41 @@
package parser

import "git.tebibyte.media/sashakoshka/arf/file"
import "git.tebibyte.media/sashakoshka/arf/lexer"

// parseMeta parses the metadata header at the top of an arf file.
func (parser *ParsingOperation) parseMeta () (err error) {
	for {
		err = parser.expect (
			lexer.TokenKindName,
			lexer.TokenKindSeparator)
		if err != nil { return }

		if parser.token.Is(lexer.TokenKindSeparator) {
			err = parser.nextToken()
			return
		}

		field := parser.token.Value().(string)
		err = parser.nextToken(lexer.TokenKindString)
		if err != nil { return }
		value := parser.token.Value().(string)

		switch field {
		case "author":
			parser.tree.author = value
		case "license":
			parser.tree.license = value
		case "require":
			parser.tree.requires = append(parser.tree.requires, value)
		default:
			parser.token.NewError (
				"unrecognized metadata field: " + field,
				file.ErrorKindError)
		}

		err = parser.nextToken(lexer.TokenKindNewline)
		if err != nil { return }
	}
}
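
parseMeta walks the header as alternating name and string tokens, one field per line, and stops once it reaches a separator token, recording the author, the license, and any number of require entries. Below is a minimal, self-contained sketch of that loop over a simplified token slice; the token kinds, the example field values, and the flat variables used here are stand-ins for illustration and are not part of this commit or of the real lexer API.

package main

import "fmt"

// tokenKind loosely mirrors the lexer token kinds parseMeta relies on; the
// real lexer.TokenKind values are not shown in this diff.
type tokenKind int

const (
	kindName tokenKind = iota
	kindString
	kindNewline
	kindSeparator
)

type token struct {
	kind  tokenKind
	value string
}

func main() {
	// A hypothetical header: an author, a license, and two required modules,
	// with a newline token after each field and a separator at the end.
	tokens := []token {
		{kindName, "author"}, {kindString, "Sasha Koshka"}, {kindNewline, ""},
		{kindName, "license"}, {kindString, "GPLv3"}, {kindNewline, ""},
		{kindName, "require"}, {kindString, "io"}, {kindNewline, ""},
		{kindName, "require"}, {kindString, "math"}, {kindNewline, ""},
		{kindSeparator, ""},
	}

	var author, license string
	var requires []string

	for index := 0; index < len(tokens); index++ {
		if tokens[index].kind == kindSeparator { break }

		field := tokens[index].value
		index++
		value := tokens[index].value
		index++ // index now sits on the newline; the loop increment steps past it

		switch field {
		case "author":  author  = value
		case "license": license = value
		case "require": requires = append(requires, value)
		default: fmt.Println("unrecognized metadata field:", field)
		}
	}

	fmt.Println(author, license, requires)
}

Running it prints the collected author, license, and requires, mirroring what parseMeta stores into parser.tree.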

View File

@ -70,7 +70,7 @@ func (parser *ParsingOperation) parse (sourceFile *file.File) (err error) {
// expect takes in a list of allowed token kinds, and returns an error if the
// current token isn't one of them. If the length of allowed is zero, this
// function will not return an error.
-func (parser *ParsingOperation) expect (allowed []lexer.TokenKind) (err error) {
+func (parser *ParsingOperation) expect (allowed ...lexer.TokenKind) (err error) {
	if len(allowed) == 0 { return }
	for _, kind := range allowed {
@ -78,16 +78,17 @@ func (parser *ParsingOperation) expect (allowed []lexer.TokenKind) (err error) {
	}
	err = file.NewError (
-		parser.token.Location(), 1,
+		parser.token.Location(),
		"unexpected token", file.ErrorKindError)
	return
}
// nextToken is the same as expect, but it advances to the next token first.
-func (parser *ParsingOperation) nextToken (allowed []lexer.TokenKind) (err error) {
+func (parser *ParsingOperation) nextToken (allowed ...lexer.TokenKind) (err error) {
	parser.tokenIndex ++
	if parser.tokenIndex >= len(parser.tokens) { return io.EOF }

	parser.token = parser.tokens[parser.tokenIndex]
	err = parser.expect(allowed...)
	return
}
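
The change in this file turns expect and nextToken from slice parameters into variadic ones, which is what lets parseMeta list the allowed token kinds directly at the call site. Here is a small stand-alone sketch of the difference, using a hypothetical check function in place of the real ParsingOperation methods:

package main

import "fmt"

type tokenKind int

const (
	kindName tokenKind = iota
	kindSeparator
)

// check mirrors the shape of expect: zero allowed kinds means anything goes,
// otherwise the current kind must match one of them.
func check(current tokenKind, allowed ...tokenKind) error {
	if len(allowed) == 0 { return nil }
	for _, kind := range allowed {
		if current == kind { return nil }
	}
	return fmt.Errorf("unexpected token")
}

func main() {
	current := kindName

	// With a slice parameter the caller must spell out a slice literal:
	//   check(current, []tokenKind{kindName, kindSeparator})
	// With a variadic parameter the kinds are listed directly:
	fmt.Println(check(current, kindName, kindSeparator))

	// An existing slice can still be passed through by spreading it, the same
	// way nextToken forwards with parser.expect(allowed...):
	allowed := []tokenKind{kindName, kindSeparator}
	fmt.Println(check(current, allowed...))
}

Either call style reaches the same variadic function, so forwarding a slice with allowed... and listing kinds inline both remain valid.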