diff --git a/lexer/lexer.go b/lexer/lexer.go
index 1e543b4..5b0754e 100644
--- a/lexer/lexer.go
+++ b/lexer/lexer.go
@@ -79,6 +79,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		}
 	case '\n':
 		// line break
+		// TODO: if last line was blank (only whitespace), discard.
 		lexer.addToken (Token {
 			kind: TokenKindNewline,
 		})
diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
new file mode 100644
index 0000000..1f8a7c1
--- /dev/null
+++ b/lexer/lexer_test.go
@@ -0,0 +1,33 @@
+package lexer
+
+import "testing"
+import "github.com/sashakoshka/arf/file"
+
+func TestTokenizeAll (test *testing.T) {
+	file, err := file.Open("tests/parser/all")
+	if err != nil {
+		test.Log(err)
+		test.Fail()
+	}
+
+	tokens, err := Tokenize(file)
+	if err != nil {
+		test.Log(err)
+		test.Fail()
+	}
+
+	correct := []Token {
+		Token { kind: TokenKindSeparator, },
+	}
+
+	if len(tokens) != len(correct) {
+		test.Log("lexed", tokens, "tokens, want", correct)
+	}
+
+	for index, token := range tokens {
+		if !token.Equals(correct[index]) {
+			test.Log("token", index, "not equal")
+			test.Fail()
+		}
+	}
+}
diff --git a/lexer/token.go b/lexer/token.go
index 5454bab..7569257 100644
--- a/lexer/token.go
+++ b/lexer/token.go
@@ -73,6 +73,11 @@ func (token Token) Value () (value any) {
 	return token.value
 }
 
+// Equals returns whether this token is equal to another token
+func (token Token) Equals (testToken Token) (match bool) {
+	return token == testToken
+}
+
 // Location returns the location of the token in its file.
 func (token Token) Location () (location file.Location) {
 	return token.location
diff --git a/main.go b/main.go
index b475b96..e7676a3 100644
--- a/main.go
+++ b/main.go
@@ -3,20 +3,20 @@ package arf
 import "os"
 import "io"
 import "path/filepath"
-import "github.com/sashakoshka/arf/lexer"
+// import "github.com/sashakoshka/arf/lexer"
 
 func CompileModule (modulePath string, output io.Writer) (err error) {
 	moduleFiles, err := os.ReadDir(modulePath)
 	if err != nil { return err }
 
-	var moduleTokens []lexer.Token
+	// var moduleTokens []lexer.Token
 	for _, entry := range moduleFiles {
 		if filepath.Ext(entry.Name()) != ".arf" || entry.IsDir() {
 			continue
 		}
 
-		tokens, err := lexer.Tokenize()
-		if err != nil { return err }
+		// tokens, err := lexer.Tokenize()
+		// if err != nil { return err }
 	}
 
 	return
diff --git a/tests/parser/all b/tests/parser/all
new file mode 100644
index 0000000..8e9b527
--- /dev/null
+++ b/tests/parser/all
@@ -0,0 +1,6 @@
+# normal tokens
+--- rn rr nw -> 239 -120 + - / * -- ++ [{[skadjsk 2039.32]}] 0x5FC2D
+"skdjlsakdj" '\n'
+
+# erroneous tokens (poorly spaced)
+---rnrrnw->239-120+-/*--++
diff --git a/tests/parser/indent b/tests/parser/indent
new file mode 100644
index 0000000..5089313
--- /dev/null
+++ b/tests/parser/indent
@@ -0,0 +1,7 @@
+line1
+ line2
+ line3
+
+line4
+
+ line5