diff --git a/generate/lex.go b/generate/lex.go
index a20c3d9..c815aad 100644
--- a/generate/lex.go
+++ b/generate/lex.go
@@ -10,6 +10,7 @@ const (
 	TokenMethod parse.TokenKind = iota
 	TokenKey
 	TokenIdent
+	TokenOption
 	TokenComma
 	TokenLBrace
 	TokenRBrace
@@ -22,6 +23,7 @@ var tokenNames = map[parse.TokenKind] string {
 	TokenMethod: "Method",
 	TokenKey: "Key",
 	TokenIdent: "Ident",
+	TokenOption: "Option",
 	TokenComma: "Comma",
 	TokenLBrace: "LBrace",
 	TokenRBrace: "RBrace",
@@ -122,6 +124,11 @@ func (this *lexer) nextInternal() (token parse.Token, err error) {
 			if this.eof { err = nil; return }
 			if err != nil { return }
 		}
+	// Option
+	case this.rune == '?':
+		token.Kind = TokenOption
+		appendRune()
+		if this.eof { err = nil; return }
 	// Comma
 	case this.rune == ',':
 		token.Kind = TokenComma
diff --git a/generate/lex_test.go b/generate/lex_test.go
index 8715cea..37676cf 100644
--- a/generate/lex_test.go
+++ b/generate/lex_test.go
@@ -16,6 +16,7 @@ func TestLex(test *testing.T) {
 	// wow
 	0001 Users []User,
 	0002 Followers U32,
+	0003 Wings ?Int,
 }`))
 	if err != nil { test.Fatal(parse.Format(err)) }
 
@@ -42,6 +43,11 @@ func TestLex(test *testing.T) {
 		tok(TokenIdent, "Followers"),
 		tok(TokenIdent, "U32"),
 		tok(TokenComma, ","),
+		tok(TokenKey, "0003"),
+		tok(TokenIdent, "Wings"),
+		tok(TokenOption, "?"),
+		tok(TokenIdent, "Int"),
+		tok(TokenComma, ","),
 		tok(TokenRBrace, "}"),
 		tok(parse.EOF, ""),
 	}
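
The new '?' case follows the usual single-rune token pattern: recognize the rune, emit a dedicated kind, and consume exactly one rune before returning to the main loop. Below is a minimal, self-contained sketch of that pattern for context only; the names (kind, token, lex) are hypothetical and not this package's API.

// Hypothetical sketch of the single-rune token pattern used for '?' above.
// Names are illustrative; this is not the lexer in generate/lex.go.
package main

import (
	"fmt"
	"unicode"
)

type kind int

const (
	kindIdent kind = iota
	kindOption
	kindComma
)

type token struct {
	kind kind
	text string
}

func lex(input string) []token {
	var tokens []token
	runes := []rune(input)
	for i := 0; i < len(runes); {
		r := runes[i]
		switch {
		case r == '?': // single-rune option marker, analogous to TokenOption
			tokens = append(tokens, token{kindOption, "?"})
			i++
		case r == ',': // single-rune comma, analogous to TokenComma
			tokens = append(tokens, token{kindComma, ","})
			i++
		case unicode.IsLetter(r): // identifier: consume a run of letters
			start := i
			for i < len(runes) && unicode.IsLetter(runes[i]) {
				i++
			}
			tokens = append(tokens, token{kindIdent, string(runes[start:i])})
		default: // skip whitespace and anything this sketch does not handle
			i++
		}
	}
	return tokens
}

func main() {
	// "Wings ?Int," lexes to Ident("Wings"), Option("?"), Ident("Int"), Comma(",")
	fmt.Println(lex("Wings ?Int,"))
}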