From 7af98d1c6fc784542471173b044e98ace9ca77b3 Mon Sep 17 00:00:00 2001
From: Sasha Koshka
Date: Tue, 4 Oct 2022 16:35:00 -0400
Subject: [PATCH 1/6] Removed rune literals from lexer

---
 lexer/lexer.go                      |  4 +---
 lexer/lexer_test.go                 | 25 ++++++++++++-------------
 lexer/text.go                       | 26 ++++----------------------
 lexer/token.go                      |  3 ---
 tests/lexer/all.arf                 |  2 +-
 tests/lexer/error/unknownEscape.arf |  2 +-
 tests/lexer/text.arf                |  6 +++---
 7 files changed, 22 insertions(+), 46 deletions(-)

diff --git a/lexer/lexer.go b/lexer/lexer.go
index c49154c..ea2c7d1 100644
--- a/lexer/lexer.go
+++ b/lexer/lexer.go
@@ -167,10 +167,8 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		token.kind = TokenKindNewline
 		lexer.addToken(token)
 		err = lexer.nextRune()
-	case '"':
-		err = lexer.tokenizeString(false)
 	case '\'':
-		err = lexer.tokenizeString(true)
+		err = lexer.tokenizeString()
 	case ':':
 		token := lexer.newToken()
 		token.kind = TokenKindColon
 		lexer.addToken(token)
diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index 623410c..95dda44 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -82,7 +82,7 @@ func compareErr (
 	}
 
 	_, err = Tokenize(file)
-	check := err.(infoerr.Error)
+	check, _ := err.(infoerr.Error)
 
 	test.Log("error that was recieved:")
 	test.Log(check)
@@ -132,7 +132,7 @@ func TestTokenizeAll (test *testing.T) {
 		quickToken(9, TokenKindUInt, uint64(932748397)),
 		quickToken(12, TokenKindFloat, 239485.37520),
 		quickToken(16, TokenKindString, "hello world!\n"),
-		quickToken(3, TokenKindRune, 'E'),
+		quickToken(3, TokenKindString, "E"),
 		quickToken(10, TokenKindName, "helloWorld"),
 		quickToken(1, TokenKindColon, nil),
 		quickToken(1, TokenKindDot, nil),
@@ -215,18 +215,17 @@ func TestTokenizeNumbers (test *testing.T) {
 
 func TestTokenizeText (test *testing.T) {
 	checkTokenSlice("../tests/lexer/text.arf", test,
-		quickToken(34, TokenKindString, "hello world!\a\b\f\n\r\t\v'\"\\"),
+		quickToken(34, TokenKindString, "hello world!\a\b\f\n\r\t\v'\\"),
 		quickToken(1, TokenKindNewline, nil),
-		quickToken(4, TokenKindRune, '\a'),
-		quickToken(4, TokenKindRune, '\b'),
-		quickToken(4, TokenKindRune, '\f'),
-		quickToken(4, TokenKindRune, '\n'),
-		quickToken(4, TokenKindRune, '\r'),
-		quickToken(4, TokenKindRune, '\t'),
-		quickToken(4, TokenKindRune, '\v'),
-		quickToken(4, TokenKindRune, '\''),
-		quickToken(4, TokenKindRune, '"' ),
-		quickToken(4, TokenKindRune, '\\'),
+		quickToken(4, TokenKindString, "\a"),
+		quickToken(4, TokenKindString, "\b"),
+		quickToken(4, TokenKindString, "\f"),
+		quickToken(4, TokenKindString, "\n"),
+		quickToken(4, TokenKindString, "\r"),
+		quickToken(4, TokenKindString, "\t"),
+		quickToken(4, TokenKindString, "\v"),
+		quickToken(4, TokenKindString, "'"),
+		quickToken(4, TokenKindString, "\\"),
 		quickToken(1, TokenKindNewline, nil),
 		quickToken(35, TokenKindString, "hello world \x40\u0040\U00000040!"),
 		quickToken(1, TokenKindNewline, nil),
diff --git a/lexer/text.go b/lexer/text.go
index 8c0234e..8d68d50 100644
--- a/lexer/text.go
+++ b/lexer/text.go
@@ -4,7 +4,7 @@ import "strconv"
 import "git.tebibyte.media/arf/arf/infoerr"
 
 // tokenizeString tokenizes a string or rune literal.
-func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
+func (lexer *LexingOperation) tokenizeString () (err error) {
 	err = lexer.nextRune()
 	if err != nil { return }
 
@@ -34,32 +34,15 @@ func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
 			if err != nil { return }
 		}
 
-		if isRuneLiteral {
-			if lexer.char == '\'' { break }
-		} else {
-			if lexer.char == '"' { break }
-		}
+		if lexer.char == '\'' { break }
 	}
 
 	err = lexer.nextRune()
 	if err != nil { return }
 
 	beginning.SetWidth(len(got))
-	if isRuneLiteral {
-		if len(got) > 1 {
-			err = infoerr.NewError (
-				beginning,
-				"excess data in rune literal",
-				infoerr.ErrorKindError)
-			return
-		}
-
-		token.kind = TokenKindRune
-		token.value = rune([]rune(got)[0])
-	} else {
-		token.kind = TokenKindString
-		token.value = got
-	}
+	token.kind = TokenKindString
+	token.value = got
 
 	token.location.SetWidth(tokenWidth)
 	lexer.addToken(token)
@@ -77,7 +60,6 @@ var escapeSequenceMap = map[rune] rune {
 	't':  '\x09',
 	'v':  '\x0b',
 	'\'': '\'',
-	'"':  '"',
 	'\\': '\\',
 }
 
diff --git a/lexer/token.go b/lexer/token.go
index 033bd06..dd52589 100644
--- a/lexer/token.go
+++ b/lexer/token.go
@@ -19,7 +19,6 @@ const (
 	TokenKindUInt
 	TokenKindFloat
 	TokenKindString
-	TokenKindRune
 
 	TokenKindName
 
@@ -156,8 +155,6 @@ func (tokenKind TokenKind) Describe () (description string) {
 		description = "Float"
 	case TokenKindString:
 		description = "String"
-	case TokenKindRune:
-		description = "Rune"
 	case TokenKindName:
 		description = "Name"
 	case TokenKindColon:
diff --git a/tests/lexer/all.arf b/tests/lexer/all.arf
index 950ece0..4f4d646 100644
--- a/tests/lexer/all.arf
+++ b/tests/lexer/all.arf
@@ -1,3 +1,3 @@
 :arf
---- rw -> -349820394 932748397 239485.37520 "hello world!\n" 'E' helloWorld:.,..()[]{}
+--- rw -> -349820394 932748397 239485.37520 'hello world!\n' 'E' helloWorld:.,..()[]{}
 + - ++ -- * / @ ! % %= ~ ~= = == != < <= << <<= > >= >> >>= | |= || & &= && ^ ^=
diff --git a/tests/lexer/error/unknownEscape.arf b/tests/lexer/error/unknownEscape.arf
index cff9a06..5add659 100644
--- a/tests/lexer/error/unknownEscape.arf
+++ b/tests/lexer/error/unknownEscape.arf
@@ -1,2 +1,2 @@
 :arf
-"\g"
+'\g'
diff --git a/tests/lexer/text.arf b/tests/lexer/text.arf
index abbc5e7..491401d 100644
--- a/tests/lexer/text.arf
+++ b/tests/lexer/text.arf
@@ -1,4 +1,4 @@
 :arf
-"hello world!\a\b\f\n\r\t\v\'\"\\"
-'\a' '\b' '\f' '\n' '\r' '\t' '\v' '\'' '\"' '\\'
-"hello world \x40\u0040\U00000040!"
+'hello world!\a\b\f\n\r\t\v\'\\'
+'\a' '\b' '\f' '\n' '\r' '\t' '\v' '\'' '\\'
+'hello world \x40\u0040\U00000040!'
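
After patch 1 every text literal in arf is written with single quotes and goes through the one remaining tokenizeString path, so a former rune literal like 'E' is simply a one-character string token. The standalone Go sketch below illustrates the same escape-map scanning idea in isolation; it uses made-up helper names, covers only the basic escape table (no \x, \u or \U forms), and is not the real LexingOperation API:

    package main

    import "fmt"

    // escapes mirrors the spirit of escapeSequenceMap once the '"' entry is
    // dropped; the \x, \u and \U handling of the real lexer is omitted here.
    var escapes = map[rune]rune{
    	'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n',
    	'r': '\r', 't': '\t', 'v': '\v', '\'': '\'', '\\': '\\',
    }

    // scanString decodes one single-quoted literal from input and returns the
    // unread remainder. It is an illustration only, not the arf lexer.
    func scanString(input []rune) (value string, rest []rune, ok bool) {
    	if len(input) == 0 || input[0] != '\'' {
    		return "", input, false
    	}
    	input = input[1:]
    	for len(input) > 0 {
    		switch input[0] {
    		case '\'':
    			return value, input[1:], true // closing quote
    		case '\\':
    			if len(input) < 2 {
    				return "", input, false
    			}
    			decoded, known := escapes[input[1]]
    			if !known {
    				return "", input, false // unknown escape sequence
    			}
    			value += string(decoded)
    			input = input[2:]
    		default:
    			value += string(input[0])
    			input = input[1:]
    		}
    	}
    	return "", input, false // unterminated literal
    }

    func main() {
    	v, _, _ := scanString([]rune(`'hello world!\n'`))
    	fmt.Printf("%q\n", v) // "hello world!\n"

    	v, _, _ = scanString([]rune(`'E'`))
    	fmt.Printf("%q\n", v) // "E" - a former rune literal is now a short string
    }

Dropping the separate rune path is what lets the later patches delete TokenKindRune, ArgumentKindRune and RuneLiteral without changing any surface syntax beyond the quote character.
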
From 6a72cc9f128c39ba5c9c339bac1c123cf75c3be2 Mon Sep 17 00:00:00 2001
From: Sasha Koshka
Date: Tue, 4 Oct 2022 16:47:32 -0400
Subject: [PATCH 2/6] Some test case fixes for the lexer

---
 lexer/lexer_test.go | 22 ++++++++++++++++------
 lexer/text.go       |  2 --
 2 files changed, 16 insertions(+), 8 deletions(-)

diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index 95dda44..71b8126 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -73,7 +73,7 @@ func compareErr (
 	correctWidth int,
 	test *testing.T,
 ) {
-	test.Log("testing errors in", filePath)
+	test.Log("testing error in", filePath)
 	file, err := file.Open(filePath)
 	if err != nil {
 		test.Log(err)
@@ -82,11 +82,17 @@ func compareErr (
 	}
 
 	_, err = Tokenize(file)
-	check, _ := err.(infoerr.Error)
+	check, isCorrectType := err.(infoerr.Error)
 
 	test.Log("error that was recieved:")
 	test.Log(check)
 
+	if !isCorrectType {
+		test.Log("error is not infoerr.Error, something has gone wrong.")
+		test.Fail()
+		return
+	}
+
 	if check.Kind() != correctKind {
 		test.Log("mismatched error kind")
 		test.Log("- want:", correctKind)
@@ -215,7 +221,7 @@ func TestTokenizeNumbers (test *testing.T) {
 
 func TestTokenizeText (test *testing.T) {
 	checkTokenSlice("../tests/lexer/text.arf", test,
-		quickToken(34, TokenKindString, "hello world!\a\b\f\n\r\t\v'\\"),
+		quickToken(32, TokenKindString, "hello world!\a\b\f\n\r\t\v'\\"),
 		quickToken(1, TokenKindNewline, nil),
 		quickToken(4, TokenKindString, "\a"),
 		quickToken(4, TokenKindString, "\b"),
@@ -250,21 +256,25 @@ func TestTokenizeIndent (test *testing.T) {
 	)
 }
 
-func TestTokenizeErr (test *testing.T) {
+func TestTokenizeErrUnexpectedSymbol (test *testing.T) {
 	compareErr (
 		"../tests/lexer/error/unexpectedSymbol.arf",
 		infoerr.ErrorKindError,
 		"unexpected symbol character ;",
 		1, 5, 1,
 		test)
-
+}
+
+func TestTokenizeErrExcessDataRune (test *testing.T) {
 	compareErr (
 		"../tests/lexer/error/excessDataRune.arf",
 		infoerr.ErrorKindError,
 		"excess data in rune literal",
 		1, 1, 7,
 		test)
-
+}
+
+func TestTokenizeErrUnknownEscape (test *testing.T) {
 	compareErr (
 		"../tests/lexer/error/unknownEscape.arf",
 		infoerr.ErrorKindError,
diff --git a/lexer/text.go b/lexer/text.go
index 8d68d50..01fb57b 100644
--- a/lexer/text.go
+++ b/lexer/text.go
@@ -12,7 +12,6 @@ func (lexer *LexingOperation) tokenizeString () (err error) {
 	got := ""
 	tokenWidth := 2
 
-	beginning := lexer.file.Location(1)
 	for {
 		if lexer.char == '\\' {
 			err = lexer.nextRune()
@@ -40,7 +39,6 @@ func (lexer *LexingOperation) tokenizeString () (err error) {
 	err = lexer.nextRune()
 	if err != nil { return }
 
-	beginning.SetWidth(len(got))
 	token.kind = TokenKindString
 	token.value = got
 

From c42f4f46fc7071632ac57b3bf1f51caa8ec364f4 Mon Sep 17 00:00:00 2001
From: Sasha Koshka
Date: Tue, 4 Oct 2022 16:51:53 -0400
Subject: [PATCH 3/6] Removed excess data in rune test case

---
 lexer/lexer_test.go                  | 21 +++++++++++----------
 tests/lexer/error/excessDataRune.arf |  2 --
 2 files changed, 11 insertions(+), 12 deletions(-)
 delete mode 100644 tests/lexer/error/excessDataRune.arf

diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index 71b8126..3241b25 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -81,8 +81,18 @@ func compareErr (
 		return
 	}
 
-	_, err = Tokenize(file)
+	tokens, err := Tokenize(file)
 	check, isCorrectType := err.(infoerr.Error)
+
+	for index, token := range tokens {
+		test.Log(index, "\tgot token:", token.Describe())
+	}
+
+	if err == nil {
+		test.Log("no error was recieved, test failed.")
+		test.Fail()
+		return
+	}
 
 	test.Log("error that was recieved:")
 	test.Log(check)
@@ -265,15 +275,6 @@ func TestTokenizeErrUnexpectedSymbol (test *testing.T) {
 		test)
 }
 
-func TestTokenizeErrExcessDataRune (test *testing.T) {
-	compareErr (
-		"../tests/lexer/error/excessDataRune.arf",
-		infoerr.ErrorKindError,
-		"excess data in rune literal",
-		1, 1, 7,
-		test)
-}
-
 func TestTokenizeErrUnknownEscape (test *testing.T) {
 	compareErr (
 		"../tests/lexer/error/unknownEscape.arf",
diff --git a/tests/lexer/error/excessDataRune.arf b/tests/lexer/error/excessDataRune.arf
deleted file mode 100644
index a0174af..0000000
--- a/tests/lexer/error/excessDataRune.arf
+++ /dev/null
@@ -1,2 +0,0 @@
-:arf
-'aaaaaaa'

From b6d3c04acd21ba8a4ef513bfc5974d91110af210 Mon Sep 17 00:00:00 2001
From: Sasha Koshka
Date: Tue, 4 Oct 2022 17:07:31 -0400
Subject: [PATCH 4/6] Removed runes from parser

---
 parser/argument.go         |  6 ------
 parser/tree-tostring.go    |  8 +-------
 parser/tree.go             |  5 +----
 tests/parser/full/main.arf |  4 ++--
 tests/parser/meta/meta.arf | 10 +++++-----
 tests/parser/skim/main.arf |  2 +-
 tests/parser/type/main.arf |  4 ++--
 7 files changed, 12 insertions(+), 27 deletions(-)

diff --git a/parser/argument.go b/parser/argument.go
index 5332e2b..1185ec0 100644
--- a/parser/argument.go
+++ b/parser/argument.go
@@ -12,7 +12,6 @@ var validArgumentStartTokens = []lexer.TokenKind {
 	lexer.TokenKindUInt,
 	lexer.TokenKindFloat,
 	lexer.TokenKindString,
-	lexer.TokenKindRune,
 
 	lexer.TokenKindLBracket,
 	lexer.TokenKindLParen,
@@ -78,11 +77,6 @@ func (parser *ParsingOperation) parseArgument () (argument Argument, err error)
 		argument.value = parser.token.Value().(string)
 		parser.nextToken()
 
-	case lexer.TokenKindRune:
-		argument.kind = ArgumentKindRune
-		argument.value = parser.token.Value().(rune)
-		parser.nextToken()
-
 	case lexer.TokenKindLBracket:
 		argument.kind = ArgumentKindPhrase
 		argument.value, err = parser.parseArgumentLevelPhrase()
diff --git a/parser/tree-tostring.go b/parser/tree-tostring.go
index ab75531..0ab3946 100644
--- a/parser/tree-tostring.go
+++ b/parser/tree-tostring.go
@@ -153,13 +153,7 @@ func (argument Argument) ToString (indent int, breakLine bool) (output string) {
 	case ArgumentKindString:
 		output += doIndent (
 			indent,
-			"\"" + argument.value.(string) + "\"")
+			"'" + argument.value.(string) + "'")
 		if breakLine { output += "\n" }
-
-	case ArgumentKindRune:
-		output += doIndent (
-			indent,
-			"'" + string(argument.value.(rune)) + "'")
-		if breakLine { output += "\n" }
 	}
 
diff --git a/parser/tree.go b/parser/tree.go
index f46e053..0b3f09f 100644
--- a/parser/tree.go
+++ b/parser/tree.go
@@ -123,11 +123,8 @@ const (
 
 	// 0.44
 	ArgumentKindFloat
 
-	// "hello world"
+	// 'hello world'
 	ArgumentKindString
-
-	// 'S'
-	ArgumentKindRune
 )
 
diff --git a/tests/parser/full/main.arf b/tests/parser/full/main.arf
index 0817ddd..1187b90 100644
--- a/tests/parser/full/main.arf
+++ b/tests/parser/full/main.arf
@@ -5,11 +5,11 @@ require "io"
 ---
 
 # this is a global variable
-data pv helloText:String "Hello, world!"
+data pv helloText:String 'Hello, world!'
 
 # this is a struct definition
 objt ro Greeter:Obj
-	rw text:String "Hi."
+	rw text:String 'Hi.'
 
 # this is a function
 func ro main
diff --git a/tests/parser/meta/meta.arf b/tests/parser/meta/meta.arf
index 8ea66e1..6eae161 100644
--- a/tests/parser/meta/meta.arf
+++ b/tests/parser/meta/meta.arf
@@ -1,7 +1,7 @@
 :arf
-author "Sasha Koshka"
-license "GPLv3"
-require "./some/local/module"
-require "/some/absolute/path/to/someModule"
-require "someLibraryInstalledInStandardLocation"
+author 'Sasha Koshka'
+license 'GPLv3'
+require './some/local/module'
+require '/some/absolute/path/to/someModule'
+require 'someLibraryInstalledInStandardLocation'
 ---
diff --git a/tests/parser/skim/main.arf b/tests/parser/skim/main.arf
index e2bfed6..3bf5aa9 100644
--- a/tests/parser/skim/main.arf
+++ b/tests/parser/skim/main.arf
@@ -7,7 +7,7 @@ data ro aExternalData:Int
 data ro bSingleValue:Int 342
 
 data ro cNestedObject:Obj (
-	(324 "hello world")
+	(324 'hello world')
 	(123.8439 9328.21348239)
 )
 
diff --git a/tests/parser/type/main.arf b/tests/parser/type/main.arf
index 7236678..a415b7d 100644
--- a/tests/parser/type/main.arf
+++ b/tests/parser/type/main.arf
@@ -9,7 +9,7 @@ type ro bBitFields:Obj
 	ro this:Int 298 & 24
 
 type ro cInit:Obj
-	ro that:String "hello world"
+	ro that:String 'hello world'
 	ro this:Int 23
 
 # the semantic analyzer should let these sections restrict the permissions of
@@ -24,7 +24,7 @@ type ro eInitAndDefine:aBasic
 	ro born:Int 4
 	ro in:Int
 	ro the:Int:3 (9348 92384 92834)
-	ro walls:String "live in the walls, die in the walls."
+	ro walls:String 'live in the walls, die in the walls.'
 
 type ro fBasic:Int
 

From c7e6c9299a26f6b30aa26a090d6515ef47610b49 Mon Sep 17 00:00:00 2001
From: Sasha Koshka
Date: Tue, 4 Oct 2022 17:13:08 -0400
Subject: [PATCH 5/6] Removed runes from the test case

---
 parser/func_test.go        |  2 +-
 parser/meta_test.go        | 10 +++++-----
 parser/tree-tostring.go    |  6 +++---
 parser/type_test.go        |  4 ++--
 tests/parser/func/main.arf |  2 +-
 5 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/parser/func_test.go b/parser/func_test.go
index 7ad00d2..09c9e53 100644
--- a/parser/func_test.go
+++ b/parser/func_test.go
@@ -25,7 +25,7 @@ func ro cBasicPhrases
 	[fn [gn 329 983 57] 123]
 func ro dArgumentTypes
 	---
-	[bird tree butterfly.wing "hello world" grass:Int:8:mut]
+	[bird tree butterfly.wing 'hello world' grass:Int:8:mut]
 func ro eMath
 	> x:Int
 	> y:Int
diff --git a/parser/meta_test.go b/parser/meta_test.go
index 07d3952..93428df 100644
--- a/parser/meta_test.go
+++ b/parser/meta_test.go
@@ -8,11 +8,11 @@ func TestMeta (test *testing.T) {
 	cwd, _ := os.Getwd()
 	checkTree ("../tests/parser/meta", false,
 `:arf
-author "Sasha Koshka"
-license "GPLv3"
-require "` + filepath.Join(cwd, "./some/local/module") + `"
-require "/usr/local/include/arf/someLibraryInstalledInStandardLocation"
-require "/some/absolute/path/to/someModule"
+author 'Sasha Koshka'
+license 'GPLv3'
+require '` + filepath.Join(cwd, "./some/local/module") + `'
+require '/usr/local/include/arf/someLibraryInstalledInStandardLocation'
+require '/some/absolute/path/to/someModule'
 ---
 `, test)
 }
diff --git a/parser/tree-tostring.go b/parser/tree-tostring.go
index 0ab3946..157e3ca 100644
--- a/parser/tree-tostring.go
+++ b/parser/tree-tostring.go
@@ -34,16 +34,16 @@ func (tree SyntaxTree) ToString (indent int) (output string) {
 	output += doIndent(indent, ":arf\n")
 
 	if tree.author != "" {
-		output += doIndent(indent, "author \"", tree.author, "\"\n")
+		output += doIndent(indent, "author '", tree.author, "'\n")
 	}
 
 	if tree.license != "" {
-		output += doIndent(indent, "license \"", tree.license, "\"\n")
"license '", tree.license, "'\n") } for _, name := range sortMapKeysAlphabetically(tree.requires) { require := tree.requires[name] - output += doIndent(indent, "require \"", require, "\"\n") + output += doIndent(indent, "require '", require, "'\n") } output += doIndent(indent, "---\n") diff --git a/parser/type_test.go b/parser/type_test.go index 6619c1d..5c0ec22 100644 --- a/parser/type_test.go +++ b/parser/type_test.go @@ -13,7 +13,7 @@ type ro bBitFields:Obj ro that:Int & 1 ro this:Int 298 & 24 type ro cInit:Obj - ro that:String "hello world" + ro that:String 'hello world' ro this:Int 23 type ro dInitInherit:aBasic ro that 9384 @@ -24,7 +24,7 @@ type ro eInitAndDefine:aBasic ro born:Int 4 ro in:Int ro the:Int:3 (9348 92384 92834) - ro walls:String "live in the walls, die in the walls." + ro walls:String 'live in the walls, die in the walls.' type ro fBasic:Int type ro gBasicInit:Int 6 diff --git a/tests/parser/func/main.arf b/tests/parser/func/main.arf index 3c47e1c..b8ed2d3 100644 --- a/tests/parser/func/main.arf +++ b/tests/parser/func/main.arf @@ -27,7 +27,7 @@ func ro cBasicPhrases func ro dArgumentTypes --- - [bird tree butterfly.wing "hello world" + [bird tree butterfly.wing 'hello world' grass:Int:mut:8] func ro eMath From 6d5bb59712c83d76c172cd1624c39d15af3bfebf Mon Sep 17 00:00:00 2001 From: Sasha Koshka Date: Tue, 4 Oct 2022 17:25:05 -0400 Subject: [PATCH 6/6] Removed runes from analyzer ez --- analyzer/argument.go | 4 ---- analyzer/primitives.go | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/analyzer/argument.go b/analyzer/argument.go index 86e9311..85b78af 100644 --- a/analyzer/argument.go +++ b/analyzer/argument.go @@ -16,7 +16,6 @@ type Argument interface { // UIntLiteral // FloatLiteral // StringLiteral - // RuneLiteral ToString (indent int) (output string) canBePassedAs (what Type) (allowed bool) @@ -62,9 +61,6 @@ func (analyzer AnalysisOperation) analyzeArgument ( case parser.ArgumentKindString: outputArgument = StringLiteral(inputArgument.Value().(string)) - - case parser.ArgumentKindRune: - outputArgument = RuneLiteral(inputArgument.Value().(rune)) } return } diff --git a/analyzer/primitives.go b/analyzer/primitives.go index ee42038..87e906a 100644 --- a/analyzer/primitives.go +++ b/analyzer/primitives.go @@ -19,7 +19,7 @@ var PrimitiveFace = createPrimitive("Face", Type {}) var PrimitiveFunc = createPrimitive("Func", Type {}) var BuiltInString = createPrimitive("String", Type { - actual: PrimitiveU32, + actual: &PrimitiveU32, kind: TypeKindVariableArray, })