Added unit test for lexer
This commit is contained in:
parent 245798f33d
commit 3f16d34aa1
@@ -79,6 +79,7 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 	}
 	case '\n':
 		// line break
+		// TODO: if last line was blank (only whitespace), discard.
 		lexer.addToken (Token {
 			kind: TokenKindNewline,
 		})
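The TODO in this hunk points at blank-line handling: a line containing only whitespace would currently emit a second consecutive newline token. A minimal sketch of one way to honor it, collapsing consecutive newline tokens; the helper below is hypothetical and not part of this commit:

// Sketch only: discard the newline emitted for a blank
// (whitespace-only) line by refusing to append two
// TokenKindNewline tokens in a row.
func appendNewline (tokens []Token) []Token {
	last := len(tokens) - 1
	if last >= 0 && tokens[last].kind == TokenKindNewline {
		// the previous token is already a newline, so the line
		// just ended was blank; discard the duplicate
		return tokens
	}
	return append(tokens, Token { kind: TokenKindNewline })
}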
33	lexer/lexer_test.go	Normal file
@@ -0,0 +1,33 @@
package lexer

import "testing"
import "github.com/sashakoshka/arf/file"

func TestTokenizeAll (test *testing.T) {
	file, err := file.Open("tests/parser/all")
	if err != nil {
		test.Log(err)
		test.Fail()
	}

	tokens, err := Tokenize(file)
	if err != nil {
		test.Log(err)
		test.Fail()
	}

	correct := []Token {
		Token { kind: TokenKindSeparator, },
	}

	if len(tokens) != len(correct) {
		test.Log("lexed", tokens, "tokens, want", correct)
	}

	for index, token := range tokens {
		if !token.Equals(correct[index]) {
			test.Log("token", index, "not equal")
			test.Fail()
		}
	}
}
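Two details of this test are worth flagging. go test runs each test binary with the package directory as its working directory, so from lexer/ the fixture path may need to be "../tests/parser/all" rather than "tests/parser/all". Separately, the length check only logs on a mismatch without failing, and if the lexer ever emits more tokens than correct holds, the comparison loop indexes past the end of the slice. A stricter version of that check, as a sketch:

// Sketch of a stricter length check: log counts rather than whole
// slices, mark the test as failed, and return early so the loop
// below cannot index past the end of correct.
if len(tokens) != len(correct) {
	test.Log("lexed", len(tokens), "tokens, want", len(correct))
	test.Fail()
	return
}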
@@ -73,6 +73,11 @@ func (token Token) Value () (value any) {
 	return token.value
 }
 
+// Equals returns whether this token is equal to another token.
+func (token Token) Equals (testToken Token) (match bool) {
+	return token == testToken
+}
+
 // Location returns the location of the token in its file.
 func (token Token) Location () (location file.Location) {
 	return token.location
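Equals is a plain struct comparison, which only works while every Token field is comparable. Since value has type any, a token whose value holds a non-comparable type such as a slice would make == panic at runtime. A small in-package usage sketch (Token's fields are unexported, and this function is illustrative, not part of the commit):

// Illustrative only: two tokens with the same kind and zero values
// compare equal field by field, including the value interface.
func demonstrateEquals () (match bool) {
	a := Token { kind: TokenKindNewline }
	b := Token { kind: TokenKindNewline }
	return a.Equals(b) // true
}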
8	main.go
@@ -3,20 +3,20 @@ package arf
 import "os"
 import "io"
 import "path/filepath"
-import "github.com/sashakoshka/arf/lexer"
+// import "github.com/sashakoshka/arf/lexer"
 
 func CompileModule (modulePath string, output io.Writer) (err error) {
 	moduleFiles, err := os.ReadDir(modulePath)
 	if err != nil { return err }
 
-	var moduleTokens []lexer.Token
+	// var moduleTokens []lexer.Token
 	for _, entry := range moduleFiles {
 		if filepath.Ext(entry.Name()) != ".arf" || entry.IsDir() {
 			continue
 		}
 
-		tokens, err := lexer.Tokenize()
-		if err != nil { return err }
+		// tokens, err := lexer.Tokenize()
+		// if err != nil { return err }
 	}
 
 	return
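The lexer calls in CompileModule are commented out rather than updated: the new test calls Tokenize with an opened file, while this loop still called lexer.Tokenize() with no arguments, which would no longer compile. If that signature holds, re-enabling the loop body might look like the sketch below; the file import, the path join, and the append are assumptions, not part of this commit:

// Hypothetical loop body, assuming Tokenize takes an opened file as
// the test suggests; requires importing github.com/sashakoshka/arf/file.
sourceFile, err := file.Open(filepath.Join(modulePath, entry.Name()))
if err != nil { return err }

tokens, err := lexer.Tokenize(sourceFile)
if err != nil { return err }
moduleTokens = append(moduleTokens, tokens...)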
6	tests/parser/all	Normal file
@@ -0,0 +1,6 @@
# normal tokens
--- rn rr nw -> 239 -120 + - / * -- ++ [{[skadjsk 2039.32]}] 0x5FC2D
"skdjlsakdj" '\n'

# erroneous tokens (poorly spaced)
---rnrrnw->239-120+-/*--++
|
7
tests/parser/indent
Normal file
7
tests/parser/indent
Normal file
@ -0,0 +1,7 @@
|
||||
line1
|
||||
line2
|
||||
line3
|
||||
|
||||
line4
|
||||
|
||||
line5