Some test case fixes for the lexer

Sasha Koshka 2022-10-04 16:47:32 -04:00
parent 7af98d1c6f
commit 6a72cc9f12
2 changed files with 16 additions and 8 deletions

View File

@@ -73,7 +73,7 @@ func compareErr (
 	correctWidth int,
 	test *testing.T,
 ) {
-	test.Log("testing errors in", filePath)
+	test.Log("testing error in", filePath)
 	file, err := file.Open(filePath)
 	if err != nil {
 		test.Log(err)
@@ -82,11 +82,17 @@ func compareErr (
 	}
 	
 	_, err = Tokenize(file)
-	check, _ := err.(infoerr.Error)
+	check, isCorrectType := err.(infoerr.Error)
 	
 	test.Log("error that was recieved:")
 	test.Log(check)
 	
+	if !isCorrectType {
+		test.Log("error is not infoerr.Error, something has gone wrong.")
+		test.Fail()
+		return
+	}
+	
 	if check.Kind() != correctKind {
 		test.Log("mismatched error kind")
 		test.Log("- want:", correctKind)
@@ -215,7 +221,7 @@ func TestTokenizeNumbers (test *testing.T) {
 
 func TestTokenizeText (test *testing.T) {
 	checkTokenSlice("../tests/lexer/text.arf", test,
-		quickToken(34, TokenKindString, "hello world!\a\b\f\n\r\t\v'\\"),
+		quickToken(32, TokenKindString, "hello world!\a\b\f\n\r\t\v'\\"),
 		quickToken(1, TokenKindNewline, nil),
 		quickToken(4, TokenKindString, "\a"),
 		quickToken(4, TokenKindString, "\b"),
@@ -250,21 +256,25 @@ func TestTokenizeIndent (test *testing.T) {
 	)
 }
 
-func TestTokenizeErr (test *testing.T) {
+func TestTokenizeErrUnexpectedSymbol (test *testing.T) {
 	compareErr (
 		"../tests/lexer/error/unexpectedSymbol.arf",
 		infoerr.ErrorKindError,
 		"unexpected symbol character ;",
 		1, 5, 1,
 		test)
-	
+}
+
+func TestTokenizeErrExcessDataRune (test *testing.T) {
 	compareErr (
 		"../tests/lexer/error/excessDataRune.arf",
 		infoerr.ErrorKindError,
 		"excess data in rune literal",
 		1, 1, 7,
 		test)
-	
+}
+
+func TestTokenizeErrUnknownEscape (test *testing.T) {
 	compareErr (
 		"../tests/lexer/error/unknownEscape.arf",
 		infoerr.ErrorKindError,
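
Splitting the old TestTokenizeErr into one test function per error fixture, as shown above, means each case now passes or fails under its own name in the test output, and a single case can be rerun on its own with go test's -run flag (for example, go test -run TestTokenizeErrExcessDataRune from the lexer package directory; the exact package path is an assumption, not something stated in this diff).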

View File

@@ -12,7 +12,6 @@ func (lexer *LexingOperation) tokenizeString () (err error) {
 	got := ""
 	tokenWidth := 2
-	beginning := lexer.file.Location(1)
 	
 	for {
 		if lexer.char == '\\' {
 			err = lexer.nextRune()
@@ -40,7 +39,6 @@ func (lexer *LexingOperation) tokenizeString () (err error) {
 
 	err = lexer.nextRune()
 	if err != nil { return }
-	beginning.SetWidth(len(got))
 	
 	token.kind = TokenKindString
 	token.value = got