package parse
// TokenKind is a number identifying a token kind. To enumerate your set of
// tokens, use iota and start at zero.
type TokenKind int

// EOF is the reserved token kind marking the end of the input. It is -1 so
// that user-defined kinds can safely start at zero with iota.
const (
	EOF TokenKind = -1
)
// Token is any token produced by a Lexer.
type Token struct {
	// Position is where the token occurred — presumably its location in
	// the source input; the Position type is declared elsewhere in this
	// package. TODO(review): confirm.
	Position Position
	// Kind identifies what kind of token this is (EOF or a user-defined
	// kind).
	Kind TokenKind
	// Value is the token's literal text, if any.
	Value string
}
// EOF returns whether or not the token is an EOF token.
|
||
|
func (tok Token) EOF () bool {
|
||
|
return tok.Kind == EOF
|
||
|
}
|
||
|
|
||
|
// Is returns whether or not the token kind matches any of the given kinds.
|
||
|
func (tok Token) Is (kinds ...TokenKind) bool {
|
||
|
for _, kind := range kinds {
|
||
|
if tok.Kind == kind { return true }
|
||
|
}
|
||
|
return false
|
||
|
}
|
||
|
|
||
|
// Is returns whether or not the token value matches any of the given values.
|
||
|
func (tok Token) ValueIs (values ...string) bool {
|
||
|
for _, value := range values {
|
||
|
if tok.Value == value { return true }
|
||
|
}
|
||
|
return false
|
||
|
}
|
||
|
|
||
|
// Lexer is an object capable of producing tokens.
|
||
|
type Lexer interface {
|
||
|
// Next returns the next token. If there are no more tokens, it returns
|
||
|
// an EOF token. It only returns an error on EOF if the file terminated
|
||
|
// unexpectedly.
|
||
|
Next () (Token, error)
|
||
|
}
|