diff --git a/lexer/lexer.go b/lexer/lexer.go
index a1e43c0..1807cfd 100644
--- a/lexer/lexer.go
+++ b/lexer/lexer.go
@@ -1,7 +1,7 @@
 package lexer
 
 import "io"
-import "fmt"
+// import "fmt"
 import "github.com/sashakoshka/arf/file"
 import "github.com/sashakoshka/arf/types"
 
@@ -33,7 +33,7 @@ func (lexer *LexingOperation) tokenize () (err error) {
 	if err != nil { return }
 
 	for {
-		fmt.Println(string(lexer.char))
+		// fmt.Println(string(lexer.char))
 
 		lowercase := lexer.char >= 'a' && lexer.char <= 'z'
 		uppercase := lexer.char >= 'A' && lexer.char <= 'Z'
diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index 62a5791..cd9cb5b 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -13,6 +13,12 @@ func TestTokenizeAll (test *testing.T) {
 	}
 
 	tokens, err := Tokenize(file)
+
+	// print all tokens
+	for _, token := range tokens {
+		test.Log("got token:", token.Describe())
+	}
+
 	test.Log("resulting error:")
 	test.Log(err.Error())
 	if err == nil {
diff --git a/lexer/token.go b/lexer/token.go
index e803606..0ac8298 100644
--- a/lexer/token.go
+++ b/lexer/token.go
@@ -1,5 +1,6 @@
 package lexer
 
+import "fmt"
 import "github.com/sashakoshka/arf/file"
 
 // TokenKind is an enum represzenting what role a token has.
@@ -84,3 +85,86 @@ func (token Token) Equals (testToken Token) (match bool) {
 func (token Token) Location () (location file.Location) {
 	return token.location
 }
+
+// Describe generates a textual description of the token to be used in debug
+// logs.
+func (token Token) Describe () (description string) {
+	switch token.kind {
+	case TokenKindNewline:
+		description += "Newline"
+	case TokenKindIndent:
+		description += "Indent"
+	case TokenKindSeparator:
+		description += "Separator"
+	case TokenKindPermission:
+		description += "Permission"
+	case TokenKindReturnDirection:
+		description += "ReturnDirection"
+	case TokenKindInt:
+		description += "Int"
+	case TokenKindUInt:
+		description += "UInt"
+	case TokenKindFloat:
+		description += "Float"
+	case TokenKindString:
+		description += "String"
+	case TokenKindRune:
+		description += "Rune"
+	case TokenKindName:
+		description += "Name"
+	case TokenKindColon:
+		description += "Colon"
+	case TokenKindDot:
+		description += "Dot"
+	case TokenKindLBracket:
+		description += "LBracket"
+	case TokenKindRBracket:
+		description += "RBracket"
+	case TokenKindLBrace:
+		description += "LBrace"
+	case TokenKindRBrace:
+		description += "RBrace"
+	case TokenKindPlus:
+		description += "Plus"
+	case TokenKindMinus:
+		description += "Minus"
+	case TokenKindIncrement:
+		description += "Increment"
+	case TokenKindDecrement:
+		description += "Decrement"
+	case TokenKindAsterisk:
+		description += "Asterisk"
+	case TokenKindSlash:
+		description += "Slash"
+	case TokenKindAt:
+		description += "At"
+	case TokenKindExclamation:
+		description += "Exclamation"
+	case TokenKindPercent:
+		description += "Percent"
+	case TokenKindTilde:
+		description += "Tilde"
+	case TokenKindLessThan:
+		description += "LessThan"
+	case TokenKindLShift:
+		description += "LShift"
+	case TokenKindGreaterThan:
+		description += "GreaterThan"
+	case TokenKindRShift:
+		description += "RShift"
+	case TokenKindBinaryOr:
+		description += "BinaryOr"
+	case TokenKindLogicalOr:
+		description += "LogicalOr"
+	case TokenKindBinaryAnd:
+		description += "BinaryAnd"
+	case TokenKindLogicalAnd:
+		description += "LogicalAnd"
+	}
+
+	if token.value != nil {
+		description += fmt.Sprint(": ", token.value)
+	}
+
+	return
+}
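Note: below is a minimal, self-contained sketch of the pattern the new Describe method follows: a switch maps each kind constant to a human-readable label, and fmt.Sprint appends the token's value when one is present. The exampleToken and exampleKind names are hypothetical stand-ins invented for this sketch (Token's kind and value fields are unexported), so it compiles on its own; it is not part of this change.

package main

import "fmt"

type exampleKind int

const (
	exampleKindInt  exampleKind = iota // hypothetical stand-in for TokenKindInt
	exampleKindName                    // hypothetical stand-in for TokenKindName
)

type exampleToken struct {
	kind  exampleKind
	value interface{}
}

func (token exampleToken) describe() (description string) {
	// map the kind constant to a human-readable label
	switch token.kind {
	case exampleKindInt:
		description += "Int"
	case exampleKindName:
		description += "Name"
	}

	// append the stored value, if any, the same way Describe does
	if token.value != nil {
		description += fmt.Sprint(": ", token.value)
	}

	return
}

func main() {
	fmt.Println(exampleToken{kind: exampleKindInt, value: 42}.describe())     // Int: 42
	fmt.Println(exampleToken{kind: exampleKindName, value: "foo"}.describe()) // Name: foo
	fmt.Println(exampleToken{kind: exampleKindName}.describe())               // Name
}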