diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index 3c7e5d1..9143521 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -36,7 +36,7 @@ func TestTokenizeAll (test *testing.T) {
 		Token { kind: TokenKindInt,    value: int64(-349820394) },
 		Token { kind: TokenKindUInt,   value: uint64(932748397) },
 		Token { kind: TokenKindFloat,  value: 239485.37520 },
-		Token { kind: TokenKindString, value: "hello world\n" },
+		Token { kind: TokenKindString, value: "hello world!\n" },
 		Token { kind: TokenKindRune,   value: 'E' },
 		Token { kind: TokenKindName,   value: "helloWorld" },
 		Token { kind: TokenKindColon },
diff --git a/lexer/text.go b/lexer/text.go
index 02fdc61..e349581 100644
--- a/lexer/text.go
+++ b/lexer/text.go
@@ -2,6 +2,19 @@ package lexer
 
 import "github.com/sashakoshka/arf/file"
 
+var escapeSequenceMap = map[rune] rune {
+	'a':  '\x07',
+	'b':  '\x08',
+	'f':  '\x0c',
+	'n':  '\x0a',
+	'r':  '\x0d',
+	't':  '\x09',
+	'v':  '\x0b',
+	'\'': '\'',
+	'"':  '"',
+	'\\': '\\',
+}
+
 func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
 	err = lexer.nextRune()
 	if err != nil { return }
@@ -9,7 +22,24 @@ func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
 	got := ""
 
 	for {
-		got += string(lexer.char)
+		// TODO: add hexadecimal escape codes
+		if lexer.char == '\\' {
+			err = lexer.nextRune()
+			if err != nil { return }
+
+			actual, exists := escapeSequenceMap[lexer.char]
+			if exists {
+				got += string(actual)
+			} else {
+				err = file.NewError (
+					lexer.file.Location(), 1,
+					"unknown escape character " +
+					string(lexer.char), file.ErrorKindError)
+				return
+			}
+		} else {
+			got += string(lexer.char)
+		}
 		err = lexer.nextRune()
 		if err != nil { return }
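
For context, a minimal stand-alone sketch (not part of the patch) of the escape-sequence substitution this diff adds to lexer/text.go. Only escapeSequenceMap mirrors the table from the diff; the unescape helper, its error message, and the main function are hypothetical illustrations rather than code from the repository.

package main

import "fmt"

// escapeSequenceMap mirrors the table added in lexer/text.go: it maps the
// character following a backslash to the rune it represents.
var escapeSequenceMap = map[rune]rune{
	'a': '\x07', 'b': '\x08', 'f': '\x0c', 'n': '\x0a',
	'r': '\x0d', 't': '\x09', 'v': '\x0b',
	'\'': '\'', '"': '"', '\\': '\\',
}

// unescape is a hypothetical helper (not part of the lexer) that applies the
// same substitution tokenizeString performs: on a backslash, look up the next
// character in the map and fail on anything unrecognized.
func unescape(input string) (string, error) {
	runes := []rune(input)
	out := ""
	for i := 0; i < len(runes); i++ {
		if runes[i] == '\\' && i+1 < len(runes) {
			actual, exists := escapeSequenceMap[runes[i+1]]
			if !exists {
				return "", fmt.Errorf("unknown escape character %c", runes[i+1])
			}
			out += string(actual)
			i++ // skip the character consumed by the escape sequence
		} else {
			out += string(runes[i])
		}
	}
	return out, nil
}

func main() {
	// Raw string, so the backslash-n reaches unescape literally.
	decoded, err := unescape(`hello world!\n`)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%q\n", decoded) // prints "hello world!\n"
}

Running the sketch yields the string with a real trailing newline, which is the same value the updated expectation in lexer_test.go now checks for.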