Compare commits
55 commits
790e7e632e...main
| SHA1 |
|---|
| 0ac71fa1c3 |
| 9232432c35 |
| b536b01eeb |
| 8175a9d4c5 |
| 3dd2ea83d3 |
| b7631530bc |
| fa1d8efe55 |
| e74aff3299 |
| 89a60e620e |
| cd528552c8 |
| 067bf2f4df |
| 777c8df6a4 |
| c470997887 |
| 715766edb4 |
| 821fa0ecb3 |
| e316eb7791 |
| 731cc828ce |
| 05aa0e6177 |
| fb43f96acc |
| b64fbd9fc4 |
| 0d366964ca |
| a5477717eb |
| 0b80a55f79 |
| 08935d69c0 |
| 39f8d7e4ac |
| 1f88b54eaa |
| b0d4ecc83f |
| 4eac5c67aa |
| 441b036a1c |
| 8817d72cb3 |
| 3ef1e706b3 |
| 944fc8514e |
| cd55a0ad8d |
| f95c7e0b1c |
| 15d1b602b3 |
| c29efd97ba |
| aa84d9a429 |
| 5dcf3b3d1a |
| d8074fa5cb |
| 6a6fe8353e |
| c4f763af5b |
| 6fbda34300 |
| 59126f60cc |
| ca80a5968d |
| 61819311e9 |
| f3b2d11f59 |
| 3900bbe7bf |
| b878017b81 |
| 5271876196 |
| 617d76fc46 |
| 0ceaedbcd8 |
| edb9c1a0b6 |
| bd433fc65d |
| c847d2187d |
| cb2264977a |
README.md (12 lines changed)
@@ -16,7 +16,7 @@ A directory of ARF files is called a module, and modules will compile to object
 files (one per module) using C as an intermediate language (maybe LLVM IR in the
 future).
 
-## Design aspects
+## Design Aspects
 
 These are some design goals that I have followed/am following:
 
@@ -32,7 +32,7 @@ These are some design goals that I have followed/am following:
 - One line at a time - the language's syntax should encourage writing code that
   flows vertically and not horizontally, with minimal nesting
 
-## Planned features
+## Planned Features
 
 - Type definition through inheritence
 - Struct member functions
@@ -49,3 +49,11 @@ These are some design goals that I have followed/am following:
 - [ ] Semantic tree -> C -> object file
 - [ ] Figure out HOW to implement generics
 - [ ] Create a standard library
+
+## Compiler Progress
+
+<img src="assets/heatmap.png" alt="Progress heatmap" width="400">
+
+- Yellow: needs to be completed for the MVP
+- Lime: ongoing progress in this area
+- Green: Already completed
assets/heatmap.png (new binary file, 119 KiB)
Binary file not shown.
@@ -1,8 +1,8 @@
 <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 288 80" width="288" height="80">
-<path d="M48 0L112 0L112 32L96 32L96 16L56 16L40 32L16 32L48 0Z" fill="#bf616a" fill-rule="evenodd" opacity="1" stroke="none"/>
-<path d="M96 64L136 64L136 80L104 80L96 72L96 64Z" fill="#bf616a" fill-rule="evenodd" opacity="1" stroke="none"/>
-<path d="M120 0L120 32L136 32L136 16L184 16L184 32L176 40L8 40L0 48L0 56L184 56L200 40L200 32L200 0L120 0Z" fill="#bf616a" fill-rule="evenodd" opacity="1" stroke="none"/>
-<path d="M191 61L204 48L236 80L210 80L191 61Z" fill="#bf616a" fill-rule="evenodd" opacity="1" stroke="none"/>
-<path d="M256 40L208 40L224 56L256 56L256 40Z" fill="#bf616a" fill-rule="evenodd" opacity="1" stroke="none"/>
-<path d="M208 0L288 0L288 16L224 16L224 32L208 32L208 0Z" fill="#bf616a" fill-rule="evenodd" opacity="1" stroke="none"/>
+<path d="M48 0L112 0L112 32L96 32L96 16L56 16L40 32L16 32L48 0Z" fill="#b81414" fill-rule="evenodd" opacity="1" stroke="none"/>
+<path d="M96 64L136 64L136 80L104 80L96 72L96 64Z" fill="#b81414" fill-rule="evenodd" opacity="1" stroke="none"/>
+<path d="M120 0L120 32L136 32L136 16L184 16L184 32L176 40L8 40L0 48L0 56L184 56L200 40L200 32L200 0L120 0Z" fill="#b81414" fill-rule="evenodd" opacity="1" stroke="none"/>
+<path d="M191 61L204 48L236 80L210 80L191 61Z" fill="#b81414" fill-rule="evenodd" opacity="1" stroke="none"/>
+<path d="M256 40L208 40L224 56L256 56L256 40Z" fill="#b81414" fill-rule="evenodd" opacity="1" stroke="none"/>
+<path d="M208 0L288 0L288 16L224 16L224 32L208 32L208 0Z" fill="#b81414" fill-rule="evenodd" opacity="1" stroke="none"/>
 </svg>
Before Size: 847 B, After Size: 847 B
@@ -1,3 +1,9 @@
 :arf
+require "io"
 ---
+
-func rr main
+func ro main
+> arguments:{String}
+< status:Int 0
+---
+io.println "hello world"
@@ -5,37 +5,31 @@ require "io"
 ---
 
 # this is a global variable
-data wn helloText:String "Hello, world!"
+data pv helloText:String "Hello, world!"
 
 # this is a struct definition
-type rr Greeter:Obj
-# "Hi." is a string constant. all Greeters will be initialized with a
-# pointer to it. I don't know really it depends on what I decide that
-# a String type even is.
-wr text:String "Hi."
-"sdfdsf" "ahh"
-"asdf"
+objt ro Greeter:Obj
+rw text:String "Hi."
 
 # this is a function
-func rr main
-> argc:Int
-> argv:{String}
-< status:Int 0
----
-let greeter:Greeter:mut
-greeter.setText helloText
-greeter.greet
+func ro main
+> arguments:{String}
+< status:Int 0
+---
+set greeter:Greeter:mut
+greeter.setText helloText
+greeter.greet
 
 # this is a member function
-func rr greet
-@ greeter:{Greeter}
----
-io.println greeter.text
+func ro greet
+@ greeter:{Greeter}
+---
+io.println greeter.text
 
 # this is mutator member function
-func rr setText
-@ greeter:{Greeter}
-> text:String
----
-greeter.text.set text
+func ro setText
+@ greeter:{Greeter}
+> text:String
+---
+greeter.text.set text
 
@@ -1,11 +0,0 @@ (file deleted)
:arf
require io
---

func rr main
> argc:Int
> argv:{String}
< status:Int
---
io.println [io.readln]
= status 0
@@ -1,13 +0,0 @@ (file deleted)
:arf
---

data:{Int 6}
-39480 398 29 0x3AFe3 0b10001010110 0o666

func rr literals
---
= stringLiteral:String "skadjlsakdj"
= intArrayLiteral:{Int 3} 2398
-2938 324
= runeLiteral:Rune 'a'
= floatArrayLiteral:{F64 5} 3248.23 0.324 -94.29
face_test.go (new file, 21 lines)
@@ -0,0 +1,21 @@
package parser

import "testing"

func TestFace (test *testing.T) {
	checkTree ("../tests/parser/face",
`:arf
---
face ro Destroyer:Face
	destroy
face ro ReadWriter:Face
	read
		> into:{Byte ..}
		< read:Int
		< err:Error
	write
		> data:{Byte ..}
		< wrote:Int
		< err:Error
`, test)
}
@@ -242,9 +242,15 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		err = lexer.nextRune()
 	case '!':
 		token := lexer.newToken()
-		token.kind = TokenKindExclamation
-		lexer.addToken(token)
 		err = lexer.nextRune()
+		if err != nil { return }
+		token.kind = TokenKindExclamation
+		if lexer.char == '=' {
+			token.kind = TokenKindNotEqualTo
+			err = lexer.nextRune()
+			token.location.SetWidth(2)
+		}
+		lexer.addToken(token)
 	case '%':
 		token := lexer.newToken()
 		token.kind = TokenKindPercent
@@ -255,6 +261,11 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		token.kind = TokenKindTilde
 		lexer.addToken(token)
 		err = lexer.nextRune()
+	case '=':
+		token := lexer.newToken()
+		token.kind = TokenKindEqualTo
+		lexer.addToken(token)
+		err = lexer.nextRune()
 	case '<':
 		token := lexer.newToken()
 		err = lexer.nextRune()
@@ -264,6 +275,10 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 			token.kind = TokenKindLShift
 			err = lexer.nextRune()
 			token.location.SetWidth(2)
+		} else if lexer.char == '=' {
+			token.kind = TokenKindLessThanEqualTo
+			err = lexer.nextRune()
+			token.location.SetWidth(2)
 		}
 		lexer.addToken(token)
 	case '>':
@@ -275,6 +290,10 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 			token.kind = TokenKindRShift
 			err = lexer.nextRune()
 			token.location.SetWidth(2)
+		} else if lexer.char == '=' {
+			token.kind = TokenKindGreaterThanEqualTo
+			err = lexer.nextRune()
+			token.location.SetWidth(2)
 		}
 		lexer.addToken(token)
 	case '|':
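The '!' case above shows the shape every new comparison operator follows: emit the one-rune token by default, but peek at the next rune and widen the token to the two-rune kind when an '=' follows. A minimal, self-contained sketch of that idea (hypothetical names, not the project's actual lexer API):

```go
package main

import "fmt"

// Token is a simplified stand-in for the lexer's token type.
type Token struct {
	Kind  string
	Width int
}

// scanComparison emits a one-rune token by default, but upgrades it to the
// two-rune kind (and widens it) when the following rune is '='.
func scanComparison(input []rune, pos int) (Token, int) {
	twoRune := func(one, two string) (Token, int) {
		if pos+1 < len(input) && input[pos+1] == '=' {
			return Token{Kind: two, Width: 2}, pos + 2
		}
		return Token{Kind: one, Width: 1}, pos + 1
	}
	switch input[pos] {
	case '!':
		return twoRune("Exclamation", "NotEqualTo")
	case '<':
		return twoRune("LessThan", "LessThanEqualTo")
	case '>':
		return twoRune("GreaterThan", "GreaterThanEqualTo")
	}
	return Token{Kind: "Unknown", Width: 1}, pos + 1
}

func main() {
	input := []rune("!= < >=")
	for pos := 0; pos < len(input); {
		if input[pos] == ' ' {
			pos++
			continue
		}
		var tok Token
		tok, pos = scanComparison(input, pos)
		fmt.Println(tok.Kind, tok.Width)
	}
}
```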
@@ -153,9 +153,13 @@ func TestTokenizeAll (test *testing.T) {
 		quickToken(1, TokenKindExclamation, nil),
 		quickToken(1, TokenKindPercent, nil),
 		quickToken(1, TokenKindTilde, nil),
+		quickToken(1, TokenKindEqualTo, nil),
+		quickToken(2, TokenKindNotEqualTo, nil),
 		quickToken(1, TokenKindLessThan, nil),
+		quickToken(2, TokenKindLessThanEqualTo, nil),
 		quickToken(2, TokenKindLShift, nil),
 		quickToken(1, TokenKindGreaterThan, nil),
+		quickToken(2, TokenKindGreaterThanEqualTo, nil),
 		quickToken(2, TokenKindRShift, nil),
 		quickToken(1, TokenKindBinaryOr, nil),
 		quickToken(2, TokenKindLogicalOr, nil),
@@ -45,9 +45,13 @@ const (
 	TokenKindPercent
 	TokenKindTilde
 
+	TokenKindEqualTo
+	TokenKindNotEqualTo
+	TokenKindLessThanEqualTo
 	TokenKindLessThan
 	TokenKindLShift
 	TokenKindGreaterThan
+	TokenKindGreaterThanEqualTo
 	TokenKindRShift
 	TokenKindBinaryOr
 	TokenKindLogicalOr
@@ -173,12 +177,20 @@ func (tokenKind TokenKind) Describe () (description string) {
 		description = "Percent"
 	case TokenKindTilde:
 		description = "Tilde"
+	case TokenKindEqualTo:
+		description = "EqualTo"
+	case TokenKindNotEqualTo:
+		description = "NotEqualTo"
 	case TokenKindLessThan:
 		description = "LessThan"
+	case TokenKindLessThanEqualTo:
+		description = "LessThanEqualTo"
 	case TokenKindLShift:
 		description = "LShift"
 	case TokenKindGreaterThan:
 		description = "GreaterThan"
+	case TokenKindGreaterThanEqualTo:
+		description = "GreaterThanEqualTo"
 	case TokenKindRShift:
 		description = "RShift"
 	case TokenKindBinaryOr:
@@ -29,8 +29,33 @@ func (parser *ParsingOperation) parseBody () (err error) {
 		}
 		parser.tree.typeSections[section.name] = section
 		if err != nil { return }
+	case "objt":
+		var section *ObjtSection
+		section, err = parser.parseObjtSection()
+		if parser.tree.objtSections == nil {
+			parser.tree.objtSections =
+				make(map[string] *ObjtSection)
+		}
+		parser.tree.objtSections[section.name] = section
+		if err != nil { return }
+	case "face":
+		var section *FaceSection
+		section, err = parser.parseFaceSection()
+		if parser.tree.faceSections == nil {
+			parser.tree.faceSections =
+				make(map[string] *FaceSection)
+		}
+		parser.tree.faceSections[section.name] = section
+		if err != nil { return }
+	case "enum":
+		var section *EnumSection
+		section, err = parser.parseEnumSection()
+		if parser.tree.enumSections == nil {
+			parser.tree.enumSections =
+				make(map[string] *EnumSection)
+		}
+		parser.tree.enumSections[section.name] = section
+		if err != nil { return }
 	case "func":
 	default:
 		err = parser.token.NewError (
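Each new case in parseBody repeats the same three steps: parse the section, allocate the destination map if it has never been used (writing to a nil map panics in Go), then index the section by name. A small sketch of that pattern on its own, with placeholder types rather than the parser's real ones:

```go
package main

import "fmt"

// Section stands in for a parsed section such as ObjtSection or EnumSection.
type Section struct {
	Name string
}

// Tree stands in for the syntax tree that owns the section maps.
type Tree struct {
	sections map[string]*Section
}

// addSection mirrors the parseBody pattern: allocate the map lazily on first
// use, then store the parsed section under its name.
func (tree *Tree) addSection(section *Section) {
	if tree.sections == nil {
		tree.sections = make(map[string]*Section)
	}
	tree.sections[section.Name] = section
}

func main() {
	tree := Tree{}
	tree.addSection(&Section{Name: "Greeter"})
	fmt.Println(len(tree.sections)) // 1
}
```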
@@ -289,8 +289,6 @@ func (parser *ParsingOperation) parseIdentifier () (
 	identifier.location = parser.token.Location()
 
 	for {
-		// TODO: eat up newlines and tabs after the dot, but not before
-		// it.
 		if !parser.token.Is(lexer.TokenKindName) { break }
 
 		identifier.trail = append (
@@ -301,6 +299,18 @@ func (parser *ParsingOperation) parseIdentifier () (
 		if err != nil { return }
 
 		if !parser.token.Is(lexer.TokenKindDot) { break }
 
 		err = parser.nextToken()
 		if err != nil { return }
+
+		// allow the identifier to continue on to the next line if there
+		// is a line break right after the dot
+		for parser.token.Is(lexer.TokenKindNewline) ||
+			parser.token.Is(lexer.TokenKindIndent) {
+
+			err = parser.nextToken()
+			if err != nil { return }
+		}
 	}
 
 	return
parser/data_test.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package parser

import "testing"

func TestData (test *testing.T) {
	checkTree ("../tests/parser/data",
`:arf
---
data ro integer:Int 3202
data ro integerArray16:{Int 16}
data ro integerArrayInitialized:{Int 16}
	3948
	293
	293049
	948
	912
	340
	0
	2304
	0
	4785
	92
data ro integerArrayVariable:{Int ..}
data ro integerPointer:{Int}
data ro mutInteger:Int:mut 3202
data ro mutIntegerPointer:{Int}:mut
data ro nestedObject:Obj
	.that
		.bird2 123.8439
		.bird3 9328.21348239
	.this
		.bird0 324
		.bird1 "hello world"
data ro object:thing.thing.thing.thing
	.that 2139
	.this 324
`, test)
}
parser/enum.go (new file, 93 lines)
@@ -0,0 +1,93 @@
package parser

import "git.tebibyte.media/sashakoshka/arf/types"
import "git.tebibyte.media/sashakoshka/arf/lexer"
import "git.tebibyte.media/sashakoshka/arf/infoerr"

func (parser *ParsingOperation) parseEnumSection () (
	section *EnumSection,
	err error,
) {
	err = parser.expect(lexer.TokenKindName)
	if err != nil { return }

	section = &EnumSection { location: parser.token.Location() }

	// get permission
	err = parser.nextToken(lexer.TokenKindPermission)
	if err != nil { return }
	section.permission = parser.token.Value().(types.Permission)

	// get name
	err = parser.nextToken(lexer.TokenKindName)
	if err != nil { return }
	section.name = parser.token.Value().(string)

	// parse inherited type
	err = parser.nextToken(lexer.TokenKindColon)
	if err != nil { return }
	err = parser.nextToken()
	if err != nil { return }
	section.what, err = parser.parseType()
	if err != nil { return }
	err = parser.expect(lexer.TokenKindNewline)
	if err != nil { return }
	err = parser.nextToken()
	if err != nil { return }

	// parse members
	err = parser.parseEnumMembers(section)
	if err != nil { return }

	if len(section.members) == 0 {
		infoerr.NewError (
			section.location,
			"defining an enum with no members",
			infoerr.ErrorKindWarn).Print()
	}
	return
}

// parseEnumMembers parses a list of members for an enum section. Indentation
// level is assumed.
func (parser *ParsingOperation) parseEnumMembers (
	into *EnumSection,
) (
	err error,
) {

	for {
		// if we've left the block, stop parsing
		if !parser.token.Is(lexer.TokenKindIndent) { return }
		if parser.token.Value().(int) != 1 { return }

		member := EnumMember { }

		// get name
		err = parser.nextToken(lexer.TokenKindName)
		if err != nil { return }
		member.location = parser.token.Location()
		member.name = parser.token.Value().(string)
		err = parser.nextToken()
		if err != nil { return }

		// parse default value
		if parser.token.Is(lexer.TokenKindNewline) {
			err = parser.nextToken()
			if err != nil { return }

			member.value, err = parser.parseInitializationValues(1)
			into.members = append(into.members, member)
			if err != nil { return }
		} else {
			member.value, err = parser.parseArgument()
			into.members = append(into.members, member)
			if err != nil { return }

			err = parser.expect(lexer.TokenKindNewline)
			if err != nil { return }
			err = parser.nextToken()
			if err != nil { return }
		}
	}
}
parser/enum_test.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package parser

import "testing"

func TestEnum (test *testing.T) {
	checkTree ("../tests/parser/enum",
`:arf
---
enum ro AffrontToGod:{Int 4}
	bird0
		28394
		9328
		398
		9
	bird1
		23
		932832
		398
		2349
	bird2
		1
		2
		3
		4
enum ro NamedColor:U32
	red 16711680
	green 65280
	blue 255
enum ro Weekday:Int
	sunday
	monday
	tuesday
	wednesday
	thursday
	friday
	saturday
`, test)
}
parser/face.go (new file, 132 lines)
@@ -0,0 +1,132 @@
package parser

import "git.tebibyte.media/sashakoshka/arf/types"
import "git.tebibyte.media/sashakoshka/arf/lexer"
import "git.tebibyte.media/sashakoshka/arf/infoerr"

// parseFaceSection parses an interface section.
func (parser *ParsingOperation) parseFaceSection () (
	section *FaceSection,
	err error,
) {
	err = parser.expect(lexer.TokenKindName)
	if err != nil { return }

	section = &FaceSection {
		location: parser.token.Location(),
		behaviors: make(map[string] FaceBehavior),
	}

	// get permission
	err = parser.nextToken(lexer.TokenKindPermission)
	if err != nil { return }
	section.permission = parser.token.Value().(types.Permission)

	// get name
	err = parser.nextToken(lexer.TokenKindName)
	if err != nil { return }
	section.name = parser.token.Value().(string)

	// parse inherited interface
	err = parser.nextToken(lexer.TokenKindColon)
	if err != nil { return }
	err = parser.nextToken(lexer.TokenKindName)
	if err != nil { return }
	section.inherits, err = parser.parseIdentifier()
	if err != nil { return }
	err = parser.nextToken(lexer.TokenKindNewline)
	if err != nil { return }
	err = parser.nextToken()
	if err != nil { return }

	// parse members
	for {
		// if we've left the block, stop parsing
		if !parser.token.Is(lexer.TokenKindIndent) { return }
		if parser.token.Value().(int) != 1 { return }

		// parse behavior
		behaviorBeginning := parser.token.Location()
		var behavior FaceBehavior
		behavior, err = parser.parseFaceBehavior()

		// add to section
		_, exists := section.behaviors[behavior.name]
		if exists {
			err = infoerr.NewError (
				behaviorBeginning,
				"multiple behaviors named " + behavior.name +
				" in this interface",
				infoerr.ErrorKindError)
			return
		}
		section.behaviors[behavior.name] = behavior

		if err != nil { return }
	}
	return
}

// parseFaceBehavior parses a single interface behavior. Indentation level is
// assumed.
func (parser *ParsingOperation) parseFaceBehavior () (
	behavior FaceBehavior,
	err error,
) {
	err = parser.expect(lexer.TokenKindIndent)
	if err != nil { return }

	// get name
	err = parser.nextToken(lexer.TokenKindName)
	if err != nil { return }
	behavior.name = parser.token.Value().(string)

	err = parser.nextToken(lexer.TokenKindNewline)
	if err != nil { return }
	err = parser.nextToken()
	if err != nil { return }

	for {
		// if we've left the block, stop parsing
		if !parser.token.Is(lexer.TokenKindIndent) { return }
		if parser.token.Value().(int) != 2 { return }

		// get preceding symbol
		err = parser.nextToken (
			lexer.TokenKindGreaterThan,
			lexer.TokenKindLessThan)
		if err != nil { return }
		kind := parser.token.Kind()

		var declaration Declaration

		// get name
		err = parser.nextToken(lexer.TokenKindName)
		if err != nil { return }
		declaration.name = parser.token.Value().(string)

		// parse inherited type
		err = parser.nextToken(lexer.TokenKindColon)
		if err != nil { return }
		err = parser.nextToken()
		if err != nil { return }
		declaration.what, err = parser.parseType()
		if err != nil { return }
		err = parser.expect(lexer.TokenKindNewline)
		if err != nil { return }
		err = parser.nextToken()
		if err != nil { return }

		if kind == lexer.TokenKindGreaterThan {
			behavior.inputs = append (
				behavior.inputs,
				declaration)
		} else {
			behavior.outputs = append (
				behavior.outputs,
				declaration)
		}
	}

	return
}
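parseEnumMembers, parseFaceSection, and parseFaceBehavior above (and parseObjtMembers below) all end a block the same way: keep consuming lines while the current token is an indent token at exactly the block's level, and return at the first token that is not. A stripped-down sketch of that loop over plain (indent, text) pairs, using hypothetical types rather than the real token API:

```go
package main

import "fmt"

// line is a stand-in for a lexed line: its indent level and its content.
type line struct {
	indent int
	text   string
}

// parseBlock consumes lines that sit at exactly the given indent level and
// stops at the first line that does not, mirroring the
// "if we've left the block, stop parsing" checks in the new parsers.
func parseBlock(lines []line, pos int, level int) (members []string, next int) {
	for next = pos; next < len(lines); next++ {
		if lines[next].indent != level {
			return
		}
		members = append(members, lines[next].text)
	}
	return
}

func main() {
	lines := []line{
		{1, "sunday"},
		{1, "monday"},
		{0, "enum ro NamedColor:U32"},
	}
	members, next := parseBlock(lines, 0, 1)
	fmt.Println(members, next) // [sunday monday] 2
}
```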
parser/meta_test.go (new file, 14 lines)
@@ -0,0 +1,14 @@
package parser

import "testing"

func TestMeta (test *testing.T) {
	checkTree ("../tests/parser/meta",
`:arf
author "Sasha Koshka"
license "GPLv3"
require "someModule"
require "otherModule"
---
`, test)
}
parser/objt.go (new file, 127 lines)
@@ -0,0 +1,127 @@
package parser

import "git.tebibyte.media/sashakoshka/arf/types"
import "git.tebibyte.media/sashakoshka/arf/lexer"
import "git.tebibyte.media/sashakoshka/arf/infoerr"

// parseObjtSection parses an object type definition. This allows for structured
// types to be defined, and for member variables to be added and overridden.
func (parser *ParsingOperation) parseObjtSection () (
	section *ObjtSection,
	err error,
) {
	err = parser.expect(lexer.TokenKindName)
	if err != nil { return }

	section = &ObjtSection { location: parser.token.Location() }

	// get permission
	err = parser.nextToken(lexer.TokenKindPermission)
	if err != nil { return }
	section.permission = parser.token.Value().(types.Permission)

	// get name
	err = parser.nextToken(lexer.TokenKindName)
	if err != nil { return }
	section.name = parser.token.Value().(string)

	// parse inherited type
	err = parser.nextToken(lexer.TokenKindColon)
	if err != nil { return }
	err = parser.nextToken()
	if err != nil { return }
	section.inherits, err = parser.parseIdentifier()
	if err != nil { return }
	err = parser.expect(lexer.TokenKindNewline)
	if err != nil { return }
	err = parser.nextToken()
	if err != nil { return }

	// parse members
	err = parser.parseObjtMembers(section)
	if err != nil { return }

	if len(section.members) == 0 {
		infoerr.NewError (
			section.location,
			"defining an object with no members",
			infoerr.ErrorKindWarn).Print()
	}
	return
}

// parseObjtMembers parses a list of members for an object section. Indentation
// level is assumed.
func (parser *ParsingOperation) parseObjtMembers (
	into *ObjtSection,
) (
	err error,
) {
	for {
		// if we've left the block, stop parsing
		if !parser.token.Is(lexer.TokenKindIndent) { return }
		if parser.token.Value().(int) != 1 { return }

		// add member to object section
		var member ObjtMember
		member, err = parser.parseObjtMember()
		into.members = append(into.members, member)
		if err != nil { return }
	}
}

// parseObjtMember parses a single member of an object section. Indentation
// level is assumed.
func (parser *ParsingOperation) parseObjtMember () (
	member ObjtMember,
	err error,
) {
	// get permission
	err = parser.nextToken(lexer.TokenKindPermission)
	if err != nil { return }
	member.permission = parser.token.Value().(types.Permission)

	// get name
	err = parser.nextToken(lexer.TokenKindName)
	if err != nil { return }
	member.name = parser.token.Value().(string)

	// get type
	err = parser.nextToken(lexer.TokenKindColon)
	if err != nil { return }
	err = parser.nextToken()
	if err != nil { return }
	member.what, err = parser.parseType()
	if err != nil { return }

	println(parser.token.Describe())

	// if there is a bit width, get it
	if parser.token.Is(lexer.TokenKindBinaryAnd) {
		err = parser.nextToken(lexer.TokenKindUInt)
		if err != nil { return }
		member.bitWidth = parser.token.Value().(uint64)
		err = parser.nextToken()
		if err != nil { return }
	}

	// parse default value
	if parser.token.Is(lexer.TokenKindNewline) {
		err = parser.nextToken()
		if err != nil { return }

		member.defaultValue,
			err = parser.parseInitializationValues(1)
		if err != nil { return }
	} else {
		member.defaultValue, err = parser.parseArgument()
		if err != nil { return }

		err = parser.expect(lexer.TokenKindNewline)
		if err != nil { return }
		err = parser.nextToken()
		if err != nil { return }
	}

	return
}
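parseObjtMember reads an optional bit width when a binary-and token follows the member's type, which is what lets declarations like `ro that:Int & 1` in the objt test below parse, and the updated ObjtMember.ToString prints it back as a ` & <width>` suffix. A rough sketch of that round trip with stand-in exported types (the real ObjtMember fields are unexported):

```go
package main

import "fmt"

// Member mirrors the parts of ObjtMember that matter for bit widths.
type Member struct {
	Name     string
	TypeName string
	BitWidth uint64
}

// render reproduces the " & <width>" suffix that the updated
// ObjtMember.ToString emits when a bit width is present.
func render(member Member) string {
	output := member.Name + ":" + member.TypeName
	if member.BitWidth > 0 {
		output += fmt.Sprint(" & ", member.BitWidth)
	}
	return output
}

func main() {
	fmt.Println(render(Member{Name: "that", TypeName: "Int", BitWidth: 1})) // that:Int & 1
	fmt.Println(render(Member{Name: "basic", TypeName: "Int"}))             // basic:Int
}
```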
parser/objt_test.go (new file, 31 lines)
@@ -0,0 +1,31 @@
package parser

import "testing"

func TestObjt (test *testing.T) {
	checkTree ("../tests/parser/objt",
`:arf
---
objt ro Basic:Obj
	ro that:Basic
	ro this:Basic
objt ro BitFields:Obj
	ro that:Int & 1
	ro this:Int & 24 298
objt ro ComplexInit:Obj
	ro whatever:{Int 3}
		230984
		849
		394580
	ro complex0:Bird
		.that 98
		.this 2
	ro complex1:Bird
		.that 98902
		.this 235
	ro basic:Int 87
objt ro Init:Obj
	ro that:String "hello world"
	ro this:Int 23
`, test)
}
@@ -1,150 +0,0 @@ (file deleted)
package parser

import "io"
import "testing"
// import "git.tebibyte.media/sashakoshka/arf/types"

func checkTree (modulePath string, correct string, test *testing.T) {
	tree, err := Parse(modulePath)
	treeString := tree.ToString(0)
	treeRunes := []rune(treeString)

	test.Log("CORRECT TREE:")
	test.Log(correct)
	test.Log("WHAT WAS PARSED:")
	test.Log(treeString)

	if err != io.EOF && err != nil {
		test.Log("returned error:")
		test.Log(err.Error())
		test.Fail()
		return
	}

	equal := true
	line := 0
	column := 0

	for index, correctChar := range correct {
		if index >= len(treeRunes) {
			test.Log (
				"parsed is too short at line", line + 1,
				"col", column + 1)
			test.Fail()
			return
		}

		if correctChar != treeRunes[index] {
			test.Log (
				"trees not equal at line", line + 1,
				"col", column + 1)
			test.Log("correct: [" + string(correctChar) + "]")
			test.Log("got: [" + string(treeRunes[index]) + "]")
			test.Fail()
			return
		}

		if correctChar == '\n' {
			line ++
			column = 0
		} else {
			column ++
		}
	}

	if len(treeString) > len(correct) {
		test.Log("parsed is too long")
		test.Fail()
		return
	}

	if !equal {
		return
	}
}

func TestMeta (test *testing.T) {
	checkTree ("../tests/parser/meta",
`:arf
author "Sasha Koshka"
license "GPLv3"
require "someModule"
require "otherModule"
---
`, test)
}

func TestData (test *testing.T) {
	checkTree ("../tests/parser/data",
`:arf
---
data ro integer:Int 3202
data ro integerArray16:{Int 16}
data ro integerArrayInitialized:{Int 16}
	3948
	293
	293049
	948
	912
	340
	0
	2304
	0
	4785
	92
data ro integerArrayVariable:{Int ..}
data ro integerPointer:{Int}
data ro mutInteger:Int:mut 3202
data ro mutIntegerPointer:{Int}:mut
data ro nestedObject:Obj
	.that
		.bird2 123.8439
		.bird3 9328.21348239
	.this
		.bird0 324
		.bird1 "hello world"
data ro object:Obj
	.that 2139
	.this 324
`, test)
}

func TestType (test *testing.T) {
	checkTree ("../tests/parser/type",
`:arf
---
type ro Basic:Int
type ro BasicInit:Int 6
type ro IntArray:{Int ..}
type ro IntArrayInit:{Int 3}
	3298
	923
	92
`, test)
}

func TestType (test *testing.T) {
	checkTree ("../tests/parser/objt",
`:arf
---
type ro Basic:Obj
	ro that:Basic
	ro this:Basic
type ro ComplexInit:Obj
	ro basic:Int 87
	ro complex0:Bird
		.that 98
		.this 2
	ro complex1:Bird
		.that 98902
		.this 235
	ro whatever:{Int 3}
		230984
		849
		394580
type ro Init:Obj
	ro that:String "hello world"
	ro this:Int
`, test)
}
parser/test-common.go (new file, 74 lines)
@@ -0,0 +1,74 @@
package parser

import "io"
import "strings"
import "testing"
// import "git.tebibyte.media/sashakoshka/arf/types"

func checkTree (modulePath string, correct string, test *testing.T) {
	tree, err := Parse(modulePath)
	treeString := tree.ToString(0)
	treeRunes := []rune(treeString)

	test.Log("CORRECT TREE:")
	logWithLineNumbers(correct, test)
	test.Log("WHAT WAS PARSED:")
	logWithLineNumbers(treeString, test)

	if err != io.EOF && err != nil {
		test.Log("returned error:")
		test.Log(err.Error())
		test.Fail()
		return
	}

	equal := true
	line := 0
	column := 0

	for index, correctChar := range correct {
		if index >= len(treeRunes) {
			test.Log (
				"parsed is too short at line", line + 1,
				"col", column + 1)
			test.Fail()
			return
		}

		if correctChar != treeRunes[index] {
			test.Log (
				"trees not equal at line", line + 1,
				"col", column + 1)
			test.Log("correct: [" + string(correctChar) + "]")
			test.Log("got: [" + string(treeRunes[index]) + "]")
			test.Fail()
			return
		}

		if correctChar == '\n' {
			line ++
			column = 0
		} else {
			column ++
		}
	}

	if len(treeString) > len(correct) {
		test.Log("parsed is too long")
		test.Fail()
		return
	}

	if !equal {
		return
	}
}

func logWithLineNumbers (bigString string, test *testing.T) {
	lines := strings.Split (
		strings.Replace(bigString, "\t", " ", -1), "\n")

	for index, line := range lines {
		test.Logf("%3d | %s", index + 1, line)
	}
}
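test-common.go takes over from the single large test file removed above: every per-section test (data_test.go, enum_test.go, face_test.go, meta_test.go, objt_test.go, type_test.go) now only hands checkTree a module path and the expected ToString output. A hypothetical additional test would follow the same shape; the module path and expected tree below are placeholders, not part of this change:

```go
package parser

import "testing"

// TestFunc is a hypothetical example of adding another section test on top of
// the shared checkTree helper. The "../tests/parser/func" path and the
// expected tree text are invented for illustration only.
func TestFunc (test *testing.T) {
	checkTree ("../tests/parser/func",
`:arf
---
`, test)
}
```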
@@ -51,6 +51,21 @@ func (tree *SyntaxTree) ToString (indent int) (output string) {
 		output += tree.typeSections[name].ToString(indent)
 	}
 
+	objtSectionKeys := sortMapKeysAlphabetically(tree.objtSections)
+	for _, name := range objtSectionKeys {
+		output += tree.objtSections[name].ToString(indent)
+	}
+
+	enumSectionKeys := sortMapKeysAlphabetically(tree.enumSections)
+	for _, name := range enumSectionKeys {
+		output += tree.enumSections[name].ToString(indent)
+	}
+
+	faceSectionKeys := sortMapKeysAlphabetically(tree.faceSections)
+	for _, name := range faceSectionKeys {
+		output += tree.faceSections[name].ToString(indent)
+	}
+
 	dataSectionKeys := sortMapKeysAlphabetically(tree.dataSections)
 	for _, name := range dataSectionKeys {
 		output += tree.dataSections[name].ToString(indent)
@@ -276,12 +291,16 @@ func (section *TypeSection) ToString (indent int) (output string) {
 	return
 }
 
-func (member *ObjtMember) ToString (indent int) (output string) {
+func (member ObjtMember) ToString (indent int) (output string) {
 	output += doIndent(indent)
 
 	output += member.permission.ToString() + " "
 	output += member.name + ":"
 	output += member.what.ToString()
 
+	if member.bitWidth > 0 {
+		output += fmt.Sprint(" & ", member.bitWidth)
+	}
+
 	isComplexInitialization :=
 		member.defaultValue.kind == ArgumentKindObjectInitializationValues ||
@@ -314,3 +333,59 @@ func (section *ObjtSection) ToString (indent int) (output string) {
 	return
 }
 
+func (section *EnumSection) ToString (indent int) (output string) {
+	output += doIndent (
+		indent,
+		"enum ",
+		section.permission.ToString(), " ",
+		section.name, ":",
+		section.what.ToString(), "\n")
+
+	for _, member := range section.members {
+		output += doIndent(indent + 1, member.name)
+
+		isComplexInitialization :=
+			member.value.kind == ArgumentKindObjectInitializationValues ||
+			member.value.kind == ArgumentKindArrayInitializationValues
+
+		if member.value.value == nil {
+			output += "\n"
+		} else if isComplexInitialization {
+			output += "\n"
+			output += member.value.ToString(indent + 2, true)
+		} else {
+			output += " " + member.value.ToString(0, false)
+			output += "\n"
+		}
+	}
+	return
+}
+
+func (section *FaceSection) ToString (indent int) (output string) {
+	output += doIndent (
+		indent,
+		"face ",
+		section.permission.ToString(), " ",
+		section.name, ":",
+		section.inherits.ToString(), "\n")
+
+	for _, name := range sortMapKeysAlphabetically(section.behaviors) {
+		behavior := section.behaviors[name]
+		output += behavior.ToString(indent + 1)
+	}
+	return
+}
+
+func (behavior *FaceBehavior) ToString (indent int) (output string) {
+	output += doIndent(indent, behavior.name, "\n")
+
+	for _, inputItem := range behavior.inputs {
+		output += doIndent(indent + 1, "> ", inputItem.ToString(), "\n")
+	}
+
+	for _, outputItem := range behavior.outputs {
+		output += doIndent(indent + 1, "< ", outputItem.ToString(), "\n")
+	}
+
+	return
+}
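Every section map in ToString is walked through sortMapKeysAlphabetically, so the emitted tree is stable despite Go's randomized map iteration order; that stability is what makes the character-by-character comparison in checkTree workable. The helper's own implementation is not shown in this diff; a plausible equivalent would be:

```go
package main

import (
	"fmt"
	"sort"
)

// sortedKeys is a stand-in for sortMapKeysAlphabetically: collect the map's
// keys and sort them so iteration order is deterministic.
func sortedKeys[T any](sections map[string]T) []string {
	keys := make([]string, 0, len(sections))
	for key := range sections {
		keys = append(keys, key)
	}
	sort.Strings(keys)
	return keys
}

func main() {
	sections := map[string]int{"Weekday": 1, "AffrontToGod": 2, "NamedColor": 3}
	for _, name := range sortedKeys(sections) {
		fmt.Println(name, sections[name])
	}
}
```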
@@ -12,6 +12,9 @@ type SyntaxTree struct {
 
 	requires []string
 	typeSections map[string] *TypeSection
+	objtSections map[string] *ObjtSection
+	enumSections map[string] *EnumSection
+	faceSections map[string] *FaceSection
 	dataSections map[string] *DataSection
 }
 
@@ -175,8 +178,9 @@ type TypeSection struct {
 type ObjtMember struct {
 	location file.Location
 	name string
 
 	what Type
+	bitWidth uint64
 	permission types.Permission
 	defaultValue Argument
 }
@@ -185,8 +189,43 @@ type ObjtMember struct {
 type ObjtSection struct {
 	location file.Location
 	name string
 
-	inherits Type
+	inherits Identifier
 	permission types.Permission
-	members map[string] ObjtMember
+	members []ObjtMember
 }
+
+type EnumMember struct {
+	location file.Location
+	name string
+	value Argument
+}
+
+// EnumSection represents an enumerated type section.
+type EnumSection struct {
+	location file.Location
+	name string
+
+	what Type
+	permission types.Permission
+	members []EnumMember
+}
+
+// FaceBehavior represents a behavior of an interface section.
+type FaceBehavior struct {
+	location file.Location
+	name string
+
+	inputs []Declaration
+	outputs []Declaration
+}
+
+// FaceSection represents an interface type section.
+type FaceSection struct {
+	location file.Location
+	name string
+	inherits Identifier
+
+	permission types.Permission
+	behaviors map[string] FaceBehavior
+}
parser/type.go (136 lines changed)
@@ -2,9 +2,10 @@ package parser
 
 import "git.tebibyte.media/sashakoshka/arf/types"
 import "git.tebibyte.media/sashakoshka/arf/lexer"
-import "git.tebibyte.media/sashakoshka/arf/infoerr"
+// import "git.tebibyte.media/sashakoshka/arf/infoerr"
 
-// parseTypeSection parses a type definition.
+// parseTypeSection parses a blind type definition, meaning it can inherit from
+// anything including primitives, but cannot define structure.
 func (parser *ParsingOperation) parseTypeSection () (
 	section *TypeSection,
 	err error,
@@ -14,50 +15,33 @@ func (parser *ParsingOperation) parseTypeSection () (
 
 	section = &TypeSection { location: parser.token.Location() }
 
-	// parse root node
-	err = parser.nextToken()
-	if err != nil { return }
-	section.root, err = parser.parseTypeNode(0)
-
-	return
-}
-
-// parseTypeNode parses a single type definition node recursively.
-func (parser *ParsingOperation) parseTypeNode (
-	baseIndent int,
-) (
-	node TypeNode,
-	err error,
-) {
-	node.children = make(map[string] TypeNode)
-
 	// get permission
-	err = parser.expect(lexer.TokenKindPermission)
+	err = parser.nextToken(lexer.TokenKindPermission)
 	if err != nil { return }
-	node.permission = parser.token.Value().(types.Permission)
+	section.permission = parser.token.Value().(types.Permission)
 
 	// get name
 	err = parser.nextToken(lexer.TokenKindName)
 	if err != nil { return }
-	node.name = parser.token.Value().(string)
+	section.name = parser.token.Value().(string)
 
-	// get inherited type
+	// parse inherited type
 	err = parser.nextToken(lexer.TokenKindColon)
 	if err != nil { return }
 	err = parser.nextToken()
 	if err != nil { return }
-	node.what, err = parser.parseType()
+	section.inherits, err = parser.parseType()
 	if err != nil { return }
 
-	// get value, or child nodes
+	// parse default values
 	if parser.token.Is(lexer.TokenKindNewline) {
 		err = parser.nextToken()
 		if err != nil { return }
 
-		err = parser.parseTypeNodeBlock(baseIndent, &node)
+		section.defaultValue, err = parser.parseInitializationValues(0)
 		if err != nil { return }
 	} else {
-		node.defaultValue, err = parser.parseArgument()
+		section.defaultValue, err = parser.parseArgument()
 		if err != nil { return }
 
 		err = parser.expect(lexer.TokenKindNewline)
@@ -67,101 +51,3 @@ func (parser *ParsingOperation) parseTypeNode (
 	}
 	return
 }
-
-// parseTypeNodeBlock starts on the line after a type node, and parses what
-// could be either an array initialization, an object initialization, or more
-// child nodes. It is similar to parseInitializationValues. If none of these
-// things were found the parser stays at the beginning of the line and the
-// method returns.
-func (parser *ParsingOperation) parseTypeNodeBlock (
-	baseIndent int,
-	parent *TypeNode,
-) (
-	err error,
-) {
-	// check if line is indented one more than baseIndent
-	if !parser.token.Is(lexer.TokenKindIndent) { return }
-	if parser.token.Value().(int) != baseIndent + 1 { return }
-
-	thingLocation := parser.token.Location()
-
-	err = parser.nextToken()
-	if err != nil { return }
-
-	if parser.token.Is(lexer.TokenKindDot) {
-
-		// object initialization
-		parser.previousToken()
-		initializationArgument := Argument { location: thingLocation }
-		var initializationValues ObjectInitializationValues
-		initializationValues, err = parser.parseObjectInitializationValues()
-		initializationArgument.kind = ArgumentKindObjectInitializationValues
-		initializationArgument.value = &initializationValues
-		parent.defaultValue = initializationArgument
-
-	} else if parser.token.Is(lexer.TokenKindPermission) {
-
-		// child members
-		parser.previousToken()
-		err = parser.parseTypeNodeChildren(parent)
-
-	} else {
-
-		// array initialization
-		parser.previousToken()
-		initializationArgument := Argument { location: thingLocation }
-		var initializationValues ArrayInitializationValues
-		initializationValues, err = parser.parseArrayInitializationValues()
-		initializationArgument.kind = ArgumentKindArrayInitializationValues
-		initializationArgument.value = &initializationValues
-		parent.defaultValue = initializationArgument
-	}
-
-	return
-}
-
-// parseTypeNodeChildren parses child type nodes into a parent type node.
-func (parser *ParsingOperation) parseTypeNodeChildren (
-	parent *TypeNode,
-) (
-	err error,
-) {
-	baseIndent := 0
-	begin := true
-
-	for {
-		// if there is no indent we can just stop parsing
-		if !parser.token.Is(lexer.TokenKindIndent) { break }
-		indent := parser.token.Value().(int)
-
-		if begin == true {
-			baseIndent = indent
-			begin = false
-		}
-
-		// do not parse any further if the indent has changed
-		if indent != baseIndent { break }
-
-		// move on to the beginning of the line, which must contain
-		// a type node
-		err = parser.nextToken()
-		if err != nil { return }
-		var child TypeNode
-		child, err = parser.parseTypeNode(baseIndent)
-
-		// if the member has already been listed, throw an error
-		_, exists := parent.children[child.name]
-		if exists {
-			err = parser.token.NewError (
-				"duplicate member \"" + child.name +
-				"\" in object member initialization",
-				infoerr.ErrorKindError)
-			return
-		}
-
-		// store in parent
-		parent.children[child.name] = child
-	}
-
-	return
-}
parser/type_test.go (new file, 17 lines)
@@ -0,0 +1,17 @@
package parser

import "testing"

func TestType (test *testing.T) {
	checkTree ("../tests/parser/type",
`:arf
---
type ro Basic:Int
type ro BasicInit:Int 6
type ro IntArray:{Int ..}
type ro IntArrayInit:{Int 3}
	3298
	923
	92
`, test)
}
@@ -1,3 +1,3 @@
 :arf
 --- rw -> -349820394 932748397 239485.37520 "hello world!\n" 'E' helloWorld:.,..[]{}
-+ - ++ -- * / @ ! % ~ < << > >> | || & &&
++ - ++ -- * / @ ! % ~ = != < <= << > >= >> | || & &&
@@ -22,7 +22,9 @@ data ro integerArrayInitialized:{Int 16}
 
 # data wr mutIntegerPointerInit:{Int}:mut [& integer]
 
-data ro object:Obj
+# TODO: maybe test identifiers somewhere else?
+data ro object:thing.thing.
+	thing.thing
 	.this 324
 	.that 2139
 
tests/parser/enum/main.arf (new file, 30 lines)
@@ -0,0 +1,30 @@
:arf
---

enum ro Weekday:Int
	sunday
	monday
	tuesday
	wednesday
	thursday
	friday
	saturday

enum ro NamedColor:U32
	red 0xFF0000
	green 0x00FF00
	blue 0x0000FF

enum ro AffrontToGod:{Int 4}
	bird0
		28394 9328
		398 9
	bird1
		23 932832
		398
		2349
	bird2
		1
		2
		3
		4
tests/parser/face/main.arf
Normal file
15
tests/parser/face/main.arf
Normal file
@@ -0,0 +1,15 @@
|
||||
:arf
|
||||
---
|
||||
|
||||
face ro ReadWriter:Face
|
||||
write
|
||||
> data:{Byte ..}
|
||||
< wrote:Int
|
||||
< err:Error
|
||||
read
|
||||
> into:{Byte ..}
|
||||
< read:Int
|
||||
< err:Error
|
||||
|
||||
face ro Destroyer:Face
|
||||
destroy
|
||||
@@ -1,14 +1,18 @@
 :arf
 ---
-type ro Basic:Obj
+objt ro Basic:Obj
 	ro that:Basic
 	ro this:Basic
 
-type ro Init:Obj
+objt ro BitFields:Obj
+	ro that:Int & 1
+	ro this:Int & 24 298
+
+objt ro Init:Obj
 	ro that:String "hello world"
 	ro this:Int 23
 
-type ro ComplexInit:Obj
+objt ro ComplexInit:Obj
 	ro whatever:{Int 3}
 		230984
 		849 394580