diff --git a/lexer/lexer.go b/lexer/lexer.go
index 5b0754e..040b405 100644
--- a/lexer/lexer.go
+++ b/lexer/lexer.go
@@ -13,7 +13,7 @@ type LexingOperation struct {
 // Tokenize converts a file into a slice of tokens (lexemes).
 func Tokenize (file *file.File) (tokens []Token, err error) {
 	lexer := LexingOperation { file: file }
-	err = lexer.tokenize()
+	err    = lexer.tokenize()
 	tokens = lexer.tokens
 
 	// if the lexing operation returned io.EOF, nothing went wrong so we
diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index 0ea97af..62a5791 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -13,6 +13,8 @@ func TestTokenizeAll (test *testing.T) {
 	}
 
 	tokens, err := Tokenize(file)
+	test.Log("resulting error:")
+	test.Log(err)
 	if err == nil {
 		test.Log("Tokenize() should have returned an error")
 		test.Fail()