Added unit test for lexer

This commit is contained in:
Sasha Koshka 2022-08-10 11:28:29 -04:00
parent 245798f33d
commit 3f16d34aa1
6 changed files with 56 additions and 4 deletions

View File

@ -79,6 +79,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
} }
case '\n': case '\n':
// line break // line break
// TODO: if last line was blank, (only whitespace) discard.
lexer.addToken (Token { lexer.addToken (Token {
kind: TokenKindNewline, kind: TokenKindNewline,
}) })

33
lexer/lexer_test.go Normal file
View File

@ -0,0 +1,33 @@
package lexer
import "testing"
import "github.com/sashakoshka/arf/file"
// TestTokenizeAll lexes the "all" test file and checks the produced token
// stream against the expected token sequence.
func TestTokenizeAll (test *testing.T) {
	// named inputFile (not file) so the file package is not shadowed
	inputFile, err := file.Open("tests/parser/all")
	if err != nil {
		test.Log(err)
		test.Fail()
		// the file is unusable; tokenizing it would be meaningless
		return
	}

	tokens, err := Tokenize(inputFile)
	if err != nil {
		test.Log(err)
		test.Fail()
		return
	}

	correct := []Token {
		Token { kind: TokenKindSeparator, },
	}

	if len(tokens) != len(correct) {
		test.Log("lexed", len(tokens), "tokens, want", len(correct))
		test.Fail()
		// stop before the loop below indexes past the end of the
		// shorter slice
		return
	}

	for index, token := range tokens {
		if !token.Equals(correct[index]) {
			test.Log("token", index, "not equal")
			test.Fail()
		}
	}
}

View File

@ -73,6 +73,11 @@ func (token Token) Value () (value any) {
return token.value return token.value
} }
// Equals returns whether this token is equal to another token
func (token Token) Equals (testToken Token) (match bool) {
	match = token == testToken
	return
}
// Location returns the location of the token in its file. // Location returns the location of the token in its file.
func (token Token) Location () (location file.Location) { func (token Token) Location () (location file.Location) {
return token.location return token.location

View File

@ -3,20 +3,20 @@ package arf
import "os" import "os"
import "io" import "io"
import "path/filepath" import "path/filepath"
import "github.com/sashakoshka/arf/lexer" // import "github.com/sashakoshka/arf/lexer"
func CompileModule (modulePath string, output io.Writer) (err error) { func CompileModule (modulePath string, output io.Writer) (err error) {
moduleFiles, err := os.ReadDir(modulePath) moduleFiles, err := os.ReadDir(modulePath)
if err != nil { return err } if err != nil { return err }
var moduleTokens []lexer.Token // var moduleTokens []lexer.Token
for _, entry := range moduleFiles { for _, entry := range moduleFiles {
if filepath.Ext(entry.Name()) != ".arf" || entry.IsDir() { if filepath.Ext(entry.Name()) != ".arf" || entry.IsDir() {
continue continue
} }
tokens, err := lexer.Tokenize() // tokens, err := lexer.Tokenize()
if err != nil { return err } // if err != nil { return err }
} }
return return

6
tests/parser/all Normal file
View File

@ -0,0 +1,6 @@
# normal tokens
--- rn rr nw -> 239 -120 + - / * -- ++ [{[skadjsk 2039.32]}] 0x5FC2D
"skdjlsakdj" '\n'
# erroneous tokens (poorly spaced)
---rnrrnw->239-120+-/*--++

7
tests/parser/indent Normal file
View File

@ -0,0 +1,7 @@
line1
line2
line3
line4
line5