Added naive string and char literal parsing

Sasha Koshka 2022-08-11 02:47:51 -05:00
parent 3768e3454f
commit 758b85e735
4 changed files with 55 additions and 9 deletions
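For orientation, here is a standalone sketch of what "naive" means in this commit (an illustration written for this summary, not code from the repository): runes are copied verbatim until the closing quote is reached, so escape sequences are not interpreted.

package main

import "fmt"

// naiveScanLiteral collects runes verbatim until the closing quote, mirroring
// the strategy used by tokenizeString (added below in lexer/text.go).
func naiveScanLiteral (input []rune, quote rune) (literal string) {
	for _, char := range input {
		if char == quote { break }
		literal += string(char)
	}
	return
}

func main () {
	fmt.Println(naiveScanLiteral([]rune(`hello\n" tail`), '"')) // prints hello\n verbatim
}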

View File

@@ -40,7 +40,6 @@ func (lexer *LexingOperation) tokenize () (err error) {
number := lexer.char >= '0' && lexer.char <= '9'
if number {
// TODO: tokenize number begin
err = lexer.tokenizeNumberBeginning(false)
if err != nil { return }
} else if lowercase || uppercase {
@@ -126,11 +125,9 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
})
err = lexer.nextRune()
case '"':
// TODO: tokenize string literal
err = lexer.nextRune()
err = lexer.tokenizeString(false)
case '\'':
// TODO: tokenize rune literal
err = lexer.nextRune()
err = lexer.tokenizeString(true)
case ':':
lexer.addToken (Token {
kind: TokenKindColon,

View File

@@ -19,10 +19,9 @@ func TestTokenizeAll (test *testing.T) {
test.Log("got token:", token.Describe())
}
test.Log("resulting error:")
if err != nil {
test.Log("returned error:")
test.Log(err.Error())
if err == nil {
test.Log("Tokenize() should have returned an error")
test.Fail()
return
}

View File

@@ -58,6 +58,9 @@ func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error
return
}
// runeToDigit converts a rune from 0-F to a corresponding digit, with a maximum
// radix. If the character is invalid, or the digit is too big, it will return
// false for worked.
func runeToDigit (char rune, radix uint64) (digit uint64, worked bool) {
worked = true
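The hunk above adds only the doc comment and shows the first line of runeToDigit's body. A minimal body consistent with that comment might look like the following sketch (an assumption for illustration; the actual body is not shown in this diff):

func runeToDigit (char rune, radix uint64) (digit uint64, worked bool) {
	worked = true

	switch {
	case char >= '0' && char <= '9':
		digit = uint64(char - '0')
	case char >= 'A' && char <= 'F':
		digit = uint64(char - 'A') + 10
	default:
		// not a digit character in the 0-F range
		worked = false
		return
	}

	if digit >= radix {
		// the digit is too big for the requested radix
		worked = false
	}
	return
}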

lexer/text.go Normal file (47 lines)
View File

@@ -0,0 +1,47 @@
package lexer

import "github.com/sashakoshka/arf/file"

// tokenizeString tokenizes a string literal, or a rune literal when
// isRuneLiteral is true. It expects the current rune to be the opening quote.
func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
	// skip the opening quote
	err = lexer.nextRune()
	if err != nil { return }

	got := ""

	for {
		// collect runes verbatim until the closing quote
		got += string(lexer.char)

		err = lexer.nextRune()
		if err != nil { return }

		if isRuneLiteral {
			if lexer.char == '\'' { break }
		} else {
			if lexer.char == '"' { break }
		}
	}

	// skip the closing quote
	err = lexer.nextRune()
	if err != nil { return }

	token := Token { }

	if isRuneLiteral {
		if len(got) > 1 {
			err = file.NewError (
				lexer.file.Location(), len(got) - 1,
				"excess data in rune literal",
				file.ErrorKindError)
			return
		}

		token.kind = TokenKindRune
		token.value = rune([]rune(got)[0])
	} else {
		token.kind = TokenKindString
		token.value = got
	}

	lexer.addToken(token)

	return
}
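One consequence of this first pass worth noting: the excess-data check in tokenizeString compares len(got), which counts bytes rather than runes, so a single multi-byte rune literal would currently be reported as excess data. A small standalone illustration of the difference:

package main

import "fmt"

func main () {
	got := "é" // one rune, two bytes in UTF-8
	fmt.Println(len(got) > 1)         // true  (byte count, so the check would trip)
	fmt.Println(len([]rune(got)) > 1) // false (rune count would not)
}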