Merge pull request 'remove-rune-literal' (#8) from remove-rune-literal into main
Reviewed-on: arf/arf#8
Commit 48b53e48f3
@@ -16,7 +16,6 @@ type Argument interface {
 // UIntLiteral
 // FloatLiteral
 // StringLiteral
-// RuneLiteral

 ToString (indent int) (output string)
 canBePassedAs (what Type) (allowed bool)
@@ -62,9 +61,6 @@ func (analyzer AnalysisOperation) analyzeArgument (

 case parser.ArgumentKindString:
 outputArgument = StringLiteral(inputArgument.Value().(string))
-
-case parser.ArgumentKindRune:
-outputArgument = RuneLiteral(inputArgument.Value().(rune))
 }
 return
 }
@@ -19,7 +19,7 @@ var PrimitiveFace = createPrimitive("Face", Type {})
 var PrimitiveFunc = createPrimitive("Func", Type {})

 var BuiltInString = createPrimitive("String", Type {
-actual: PrimitiveU32,
+actual: &PrimitiveU32,
 kind: TypeKindVariableArray,
 })

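In the hunk above, `BuiltInString` now stores `&PrimitiveU32` rather than a copy of the value. The `Type` struct itself is not shown in this diff, so the sketch below is only a guess at the intent, with a cut-down hypothetical `Type`: a pointer-typed `actual` field lets a derived type reference the primitive instead of embedding its own copy.

```go
package main

import "fmt"

// Hypothetical stand-ins; the real Type and createPrimitive live in the arf
// analyzer and are not reproduced in this pull request.
type Type struct {
	name   string
	actual *Type // pointer: derived types reference the primitive, not a copy
}

var PrimitiveU32 = Type{name: "U32"}

// Mirrors the updated line `actual: &PrimitiveU32,` from the hunk above.
var BuiltInString = Type{name: "String", actual: &PrimitiveU32}

func main() {
	fmt.Println(BuiltInString.actual == &PrimitiveU32) // true: shared, not copied
}
```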
@@ -167,10 +167,8 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 token.kind = TokenKindNewline
 lexer.addToken(token)
 err = lexer.nextRune()
-case '"':
-err = lexer.tokenizeString(false)
 case '\'':
-err = lexer.tokenizeString(true)
+err = lexer.tokenizeString()
 case ':':
 token := lexer.newToken()
 token.kind = TokenKindColon
@@ -73,7 +73,7 @@ func compareErr (
 correctWidth int,
 test *testing.T,
 ) {
-test.Log("testing errors in", filePath)
+test.Log("testing error in", filePath)
 file, err := file.Open(filePath)
 if err != nil {
 test.Log(err)
@@ -81,12 +81,28 @@ func compareErr (
 return
 }
-_, err = Tokenize(file)
-check := err.(infoerr.Error)
+
+tokens, err := Tokenize(file)
+check, isCorrectType := err.(infoerr.Error)
+
+for index, token := range tokens {
+test.Log(index, "\tgot token:", token.Describe())
+}
+
 if err == nil {
 test.Log("no error was recieved, test failed.")
 test.Fail()
 return
 }
+
+test.Log("error that was recieved:")
+test.Log(check)
+
+if !isCorrectType {
+test.Log("error is not infoerr.Error, something has gone wrong.")
+test.Fail()
+return
+}
+
 if check.Kind() != correctKind {
 test.Log("mismatched error kind")
 test.Log("- want:", correctKind)
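The updated helper asserts the error's concrete type with the two-value (comma-ok) form before anything else, so a nil or foreign error can no longer panic the test, and it now logs every token it received, which helps when tokenization succeeds unexpectedly. A minimal illustration of the comma-ok pattern, using a stand-in error type since infoerr.Error itself is not part of this diff:

```go
package main

import (
	"errors"
	"fmt"
)

// stubError stands in for infoerr.Error; only the assertion pattern matters here.
type stubError struct{ msg string }

func (e stubError) Error() string { return e.msg }

func main() {
	var err error = errors.New("some other error type")

	// check := err.(stubError) would panic here; the two-value form reports
	// the mismatch through isCorrectType instead.
	check, isCorrectType := err.(stubError)
	fmt.Println(isCorrectType)   // false
	fmt.Println(check.msg == "") // true: check is just the zero value
}
```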
@@ -132,7 +148,7 @@ func TestTokenizeAll (test *testing.T) {
 quickToken(9, TokenKindUInt, uint64(932748397)),
 quickToken(12, TokenKindFloat, 239485.37520),
 quickToken(16, TokenKindString, "hello world!\n"),
-quickToken(3, TokenKindRune, 'E'),
+quickToken(3, TokenKindString, "E"),
 quickToken(10, TokenKindName, "helloWorld"),
 quickToken(1, TokenKindColon, nil),
 quickToken(1, TokenKindDot, nil),
@@ -215,18 +231,17 @@ func TestTokenizeNumbers (test *testing.T) {

 func TestTokenizeText (test *testing.T) {
 checkTokenSlice("../tests/lexer/text.arf", test,
-quickToken(34, TokenKindString, "hello world!\a\b\f\n\r\t\v'\"\\"),
+quickToken(32, TokenKindString, "hello world!\a\b\f\n\r\t\v'\\"),
 quickToken(1, TokenKindNewline, nil),
-quickToken(4, TokenKindRune, '\a'),
-quickToken(4, TokenKindRune, '\b'),
-quickToken(4, TokenKindRune, '\f'),
-quickToken(4, TokenKindRune, '\n'),
-quickToken(4, TokenKindRune, '\r'),
-quickToken(4, TokenKindRune, '\t'),
-quickToken(4, TokenKindRune, '\v'),
-quickToken(4, TokenKindRune, '\''),
-quickToken(4, TokenKindRune, '"' ),
-quickToken(4, TokenKindRune, '\\'),
+quickToken(4, TokenKindString, "\a"),
+quickToken(4, TokenKindString, "\b"),
+quickToken(4, TokenKindString, "\f"),
+quickToken(4, TokenKindString, "\n"),
+quickToken(4, TokenKindString, "\r"),
+quickToken(4, TokenKindString, "\t"),
+quickToken(4, TokenKindString, "\v"),
+quickToken(4, TokenKindString, "'"),
+quickToken(4, TokenKindString, "\\"),
 quickToken(1, TokenKindNewline, nil),
 quickToken(35, TokenKindString, "hello world \x40\u0040\U00000040!"),
 quickToken(1, TokenKindNewline, nil),
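The width in the first `quickToken` call drops from 34 to 32 because the `\"` escape, two characters in the source literal, is gone; each remaining escaped character such as `'\a'` still spans four source characters, which is why the former rune entries keep width 4 as one-character strings. The arithmetic, checked in Go:

```go
package main

import "fmt"

func main() {
	// Updated first literal in text.arf: two quotes, "hello world!",
	// and nine two-character escape sequences.
	fmt.Println(2 + len("hello world!") + 9*2) // 32

	// A single escaped-character literal such as '\a' in the source.
	fmt.Println(len(`'\a'`)) // 4
}
```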
@@ -251,21 +266,16 @@ func TestTokenizeIndent (test *testing.T) {
 )
 }

-func TestTokenizeErr (test *testing.T) {
+func TestTokenizeErrUnexpectedSymbol (test *testing.T) {
 compareErr (
 "../tests/lexer/error/unexpectedSymbol.arf",
 infoerr.ErrorKindError,
 "unexpected symbol character ;",
 1, 5, 1,
 test)
-
-compareErr (
-"../tests/lexer/error/excessDataRune.arf",
-infoerr.ErrorKindError,
-"excess data in rune literal",
-1, 1, 7,
-test)
-
+}
+
+func TestTokenizeErrUnknownEscape (test *testing.T) {
 compareErr (
 "../tests/lexer/error/unknownEscape.arf",
 infoerr.ErrorKindError,
@@ -4,7 +4,7 @@ import "strconv"
 import "git.tebibyte.media/arf/arf/infoerr"

 // tokenizeString tokenizes a string or rune literal.
-func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
+func (lexer *LexingOperation) tokenizeString () (err error) {
 err = lexer.nextRune()
 if err != nil { return }

@@ -12,7 +12,6 @@ func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
 got := ""
 tokenWidth := 2

-beginning := lexer.file.Location(1)
 for {
 if lexer.char == '\\' {
 err = lexer.nextRune()
@@ -34,32 +33,14 @@ func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
 if err != nil { return }
 }

-if isRuneLiteral {
-if lexer.char == '\'' { break }
-} else {
-if lexer.char == '"' { break }
-}
+if lexer.char == '\'' { break }
 }

 err = lexer.nextRune()
 if err != nil { return }

-beginning.SetWidth(len(got))
-if isRuneLiteral {
-if len(got) > 1 {
-err = infoerr.NewError (
-beginning,
-"excess data in rune literal",
-infoerr.ErrorKindError)
-return
-}
-
-token.kind = TokenKindRune
-token.value = rune([]rune(got)[0])
-} else {
-token.kind = TokenKindString
-token.value = got
-}
+token.kind = TokenKindString
+token.value = got

 token.location.SetWidth(tokenWidth)
 lexer.addToken(token)
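With rune literals gone, the loop above only has to watch for a backslash escape or the closing `'`. The hunk shows the shape of that loop but not the surrounding plumbing; the sketch below is a standalone approximation over a plain rune slice, with a trimmed-down escape table (the real `LexingOperation`, `nextRune`, and the hex/unicode escapes exercised by the tests are omitted):

```go
package main

import "fmt"

// A few entries in the style of escapeSequenceMap from the next hunk.
var escapes = map[rune]rune{
	'n':  '\n',
	't':  '\t',
	'\'': '\'',
	'\\': '\\',
}

// scanSingleQuoted decodes text starting just after an opening ', returning
// the decoded value and whether a closing quote was found.
func scanSingleQuoted(input []rune) (got string, ok bool) {
	for index := 0; index < len(input); index++ {
		char := input[index]
		if char == '\\' {
			index++
			if index >= len(input) {
				return "", false // dangling backslash
			}
			decoded, known := escapes[input[index]]
			if !known {
				return "", false // unknown escape sequence
			}
			got += string(decoded)
			continue
		}
		if char == '\'' {
			return got, true // closing quote: the literal is complete
		}
		got += string(char)
	}
	return "", false // unterminated literal
}

func main() {
	got, ok := scanSingleQuoted([]rune(`hello\tworld'`))
	fmt.Printf("%q %v\n", got, ok) // "hello\tworld" true
}
```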
@@ -77,7 +58,6 @@ var escapeSequenceMap = map[rune] rune {
 't': '\x09',
 'v': '\x0b',
 '\'': '\'',
-'"': '"',
 '\\': '\\',
 }

@@ -19,7 +19,6 @@ const (
 TokenKindUInt
 TokenKindFloat
 TokenKindString
-TokenKindRune

 TokenKindName

@@ -156,8 +155,6 @@ func (tokenKind TokenKind) Describe () (description string) {
 description = "Float"
 case TokenKindString:
 description = "String"
-case TokenKindRune:
-description = "Rune"
 case TokenKindName:
 description = "Name"
 case TokenKindColon:
@@ -12,7 +12,6 @@ var validArgumentStartTokens = []lexer.TokenKind {
 lexer.TokenKindUInt,
 lexer.TokenKindFloat,
 lexer.TokenKindString,
-lexer.TokenKindRune,

 lexer.TokenKindLBracket,
 lexer.TokenKindLParen,
@@ -78,11 +77,6 @@ func (parser *ParsingOperation) parseArgument () (argument Argument, err error)
 argument.value = parser.token.Value().(string)
 parser.nextToken()

-case lexer.TokenKindRune:
-argument.kind = ArgumentKindRune
-argument.value = parser.token.Value().(rune)
-parser.nextToken()
-
 case lexer.TokenKindLBracket:
 argument.kind = ArgumentKindPhrase
 argument.value, err = parser.parseArgumentLevelPhrase()
@@ -25,7 +25,7 @@ func ro cBasicPhrases
 [fn [gn 329 983 57] 123]
 func ro dArgumentTypes
 ---
-[bird tree butterfly.wing "hello world" grass:Int:8:mut]
+[bird tree butterfly.wing 'hello world' grass:Int:8:mut]
 func ro eMath
 > x:Int
 > y:Int
@@ -8,11 +8,11 @@ func TestMeta (test *testing.T) {
 cwd, _ := os.Getwd()
 checkTree ("../tests/parser/meta", false,
 `:arf
-author "Sasha Koshka"
-license "GPLv3"
-require "` + filepath.Join(cwd, "./some/local/module") + `"
-require "/usr/local/include/arf/someLibraryInstalledInStandardLocation"
-require "/some/absolute/path/to/someModule"
+author 'Sasha Koshka'
+license 'GPLv3'
+require '` + filepath.Join(cwd, "./some/local/module") + `'
+require '/usr/local/include/arf/someLibraryInstalledInStandardLocation'
+require '/some/absolute/path/to/someModule'
 ---
 `, test)
 }
@@ -34,16 +34,16 @@ func (tree SyntaxTree) ToString (indent int) (output string) {
 output += doIndent(indent, ":arf\n")

 if tree.author != "" {
-output += doIndent(indent, "author \"", tree.author, "\"\n")
+output += doIndent(indent, "author '", tree.author, "'\n")
 }

 if tree.license != "" {
-output += doIndent(indent, "license \"", tree.license, "\"\n")
+output += doIndent(indent, "license '", tree.license, "'\n")
 }

 for _, name := range sortMapKeysAlphabetically(tree.requires) {
 require := tree.requires[name]
-output += doIndent(indent, "require \"", require, "\"\n")
+output += doIndent(indent, "require '", require, "'\n")
 }

 output += doIndent(indent, "---\n")
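Each metadata line switches from escaped double quotes to bare single quotes, matching the updated `TestMeta` expectation earlier in the diff. `doIndent` is only visible through its call sites here, so the helper below is an assumed stand-in that joins its string arguments after an indent prefix:

```go
package main

import (
	"fmt"
	"strings"
)

// doIndent is an assumed stand-in for the parser's helper; only its call
// sites appear in this pull request.
func doIndent(indent int, text ...string) string {
	return strings.Repeat("\t", indent) + strings.Join(text, "")
}

func main() {
	author := "Sasha Koshka"
	// Before: doIndent(indent, "author \"", author, "\"\n")
	// After:  no escaping needed inside the Go literal.
	fmt.Print(doIndent(0, "author '", author, "'\n"))
}
```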
@@ -153,13 +153,7 @@ func (argument Argument) ToString (indent int, breakLine bool) (output string) {
 case ArgumentKindString:
 output += doIndent (
 indent,
-"\"" + argument.value.(string) + "\"")
-if breakLine { output += "\n" }
-
-case ArgumentKindRune:
-output += doIndent (
-indent,
-"'" + string(argument.value.(rune)) + "'")
+"'" + argument.value.(string) + "'")
 if breakLine { output += "\n" }
 }

@@ -123,11 +123,8 @@ const (
 // 0.44
 ArgumentKindFloat

-// "hello world"
+// 'hello world'
 ArgumentKindString
-
-// 'S'
-ArgumentKindRune
 )

 // Argument represents a value that can be placed anywhere a value goes. This
@@ -13,7 +13,7 @@ type ro bBitFields:Obj
 ro that:Int & 1
 ro this:Int 298 & 24
 type ro cInit:Obj
-ro that:String "hello world"
+ro that:String 'hello world'
 ro this:Int 23
 type ro dInitInherit:aBasic
 ro that 9384
@@ -24,7 +24,7 @@ type ro eInitAndDefine:aBasic
 ro born:Int 4
 ro in:Int
 ro the:Int:3 (9348 92384 92834)
-ro walls:String "live in the walls, die in the walls."
+ro walls:String 'live in the walls, die in the walls.'
 type ro fBasic:Int
 type ro gBasicInit:Int
 6
@ -1,3 +1,3 @@
|
||||
:arf
|
||||
--- rw -> -349820394 932748397 239485.37520 "hello world!\n" 'E' helloWorld:.,..()[]{}
|
||||
--- rw -> -349820394 932748397 239485.37520 'hello world!\n' 'E' helloWorld:.,..()[]{}
|
||||
+ - ++ -- * / @ ! % %= ~ ~= = == != < <= << <<= > >= >> >>= | |= || & &= && ^ ^=
|
||||
|
@ -1,2 +0,0 @@
|
||||
:arf
|
||||
'aaaaaaa'
|
@@ -1,2 +1,2 @@
 :arf
-"\g"
+'\g'
@ -1,4 +1,4 @@
|
||||
:arf
|
||||
"hello world!\a\b\f\n\r\t\v\'\"\\"
|
||||
'\a' '\b' '\f' '\n' '\r' '\t' '\v' '\'' '\"' '\\'
|
||||
"hello world \x40\u0040\U00000040!"
|
||||
'hello world!\a\b\f\n\r\t\v\'\\'
|
||||
'\a' '\b' '\f' '\n' '\r' '\t' '\v' '\'' '\\'
|
||||
'hello world \x40\u0040\U00000040!'
|
||||
|
@@ -5,11 +5,11 @@ require "io"
 ---

 # this is a global variable
-data pv helloText:String "Hello, world!"
+data pv helloText:String 'Hello, world!'

 # this is a struct definition
 objt ro Greeter:Obj
-rw text:String "Hi."
+rw text:String 'Hi.'

 # this is a function
 func ro main
@@ -27,7 +27,7 @@ func ro cBasicPhrases

 func ro dArgumentTypes
 ---
-[bird tree butterfly.wing "hello world"
+[bird tree butterfly.wing 'hello world'
 grass:Int:mut:8]

 func ro eMath
@@ -1,7 +1,7 @@
 :arf
-author "Sasha Koshka"
-license "GPLv3"
-require "./some/local/module"
-require "/some/absolute/path/to/someModule"
-require "someLibraryInstalledInStandardLocation"
+author 'Sasha Koshka'
+license 'GPLv3'
+require './some/local/module'
+require '/some/absolute/path/to/someModule'
+require 'someLibraryInstalledInStandardLocation'
 ---
@@ -7,7 +7,7 @@ data ro aExternalData:Int
 data ro bSingleValue:Int 342

 data ro cNestedObject:Obj (
-(324 "hello world")
+(324 'hello world')
 (123.8439 9328.21348239)
 )

@@ -9,7 +9,7 @@ type ro bBitFields:Obj
 ro this:Int 298 & 24

 type ro cInit:Obj
-ro that:String "hello world"
+ro that:String 'hello world'
 ro this:Int 23

 # the semantic analyzer should let these sections restrict the permissions of
@@ -24,7 +24,7 @@ type ro eInitAndDefine:aBasic
 ro born:Int 4
 ro in:Int
 ro the:Int:3 (9348 92384 92834)
-ro walls:String "live in the walls, die in the walls."
+ro walls:String 'live in the walls, die in the walls.'

 type ro fBasic:Int
