pass-test-tokenize-all #1

Merged
sashakoshka merged 20 commits from pass-test-tokenize-all into main 2022-08-11 03:07:36 -06:00
2 changed files with 32 additions and 2 deletions
Showing only changes of commit a0d5b3a1a2 - Show all commits

View File

@@ -36,7 +36,7 @@ func TestTokenizeAll (test *testing.T) {
Token { kind: TokenKindInt, value: int64(-349820394) },
Token { kind: TokenKindUInt, value: uint64(932748397) },
Token { kind: TokenKindFloat, value: 239485.37520 },
Token { kind: TokenKindString, value: "hello world\n" },
Token { kind: TokenKindString, value: "hello world!\n" },
Token { kind: TokenKindRune, value: 'E' },
Token { kind: TokenKindName, value: "helloWorld" },
Token { kind: TokenKindColon },

View File

@@ -2,6 +2,19 @@ package lexer
import "github.com/sashakoshka/arf/file"
// escapeSequenceMap translates the character that follows a backslash in a
// string or rune literal into the rune it denotes.
var escapeSequenceMap = map[rune]rune{
	'a':  '\a',
	'b':  '\b',
	'f':  '\f',
	'n':  '\n',
	'r':  '\r',
	't':  '\t',
	'v':  '\v',
	'\'': '\'',
	'"':  '"',
	'\\': '\\',
}
func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
err = lexer.nextRune()
if err != nil { return }
@@ -9,7 +22,24 @@ func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
got := ""
for {
got += string(lexer.char)
// TODO: add hexadecimal escape codes
if lexer.char == '\\' {
err = lexer.nextRune()
if err != nil { return }
actual, exists := escapeSequenceMap[lexer.char]
if exists {
got += string(actual)
} else {
err = file.NewError (
lexer.file.Location(), 1,
"unknown escape character " +
string(lexer.char), file.ErrorKindError)
return
}
} else {
got += string(lexer.char)
}
err = lexer.nextRune()
if err != nil { return }