Compare commits

...

8 Commits

Author SHA1 Message Date
Sasha Koshka
0ac71fa1c3 Added progress heatmap 2022-08-25 00:51:54 -04:00
Sasha Koshka
9232432c35 Implemented those bad boys 2022-08-25 00:05:40 -04:00
Sasha Koshka
b536b01eeb Added new tokens to test case 2022-08-25 00:01:28 -04:00
Sasha Koshka
8175a9d4c5 Added some more tokens to the TokenKind enum 2022-08-24 23:58:21 -04:00
Sasha Koshka
3dd2ea83d3 I forgot the 2022-08-24 23:55:34 -04:00
Sasha Koshka
b7631530bc yeah 2022-08-24 23:54:06 -04:00
Sasha Koshka
fa1d8efe55 Its just as I feared. Identifier parsing doesn't work! 2022-08-24 23:50:16 -04:00
e74aff3299 Merge pull request 'tree-cleanup' (#9) from tree-cleanup into main
Reviewed-on: #9
2022-08-25 00:24:41 +00:00
9 changed files with 64 additions and 11 deletions

View File

@@ -16,7 +16,7 @@ A directory of ARF files is called a module, and modules will compile to object
files (one per module) using C as an intermediate language (maybe LLVM IR in the files (one per module) using C as an intermediate language (maybe LLVM IR in the
future). future).
## Design aspects ## Design Aspects
These are some design goals that I have followed/am following: These are some design goals that I have followed/am following:
@@ -32,7 +32,7 @@ These are some design goals that I have followed/am following:
- One line at a time - the language's syntax should encourage writing code that - One line at a time - the language's syntax should encourage writing code that
flows vertically and not horizontally, with minimal nesting flows vertically and not horizontally, with minimal nesting
## Planned features ## Planned Features
- Type definition through inheritance - Type definition through inheritance
- Struct member functions - Struct member functions
@@ -49,3 +49,11 @@ These are some design goals that I have followed/am following:
- [ ] Semantic tree -> C -> object file - [ ] Semantic tree -> C -> object file
- [ ] Figure out HOW to implement generics - [ ] Figure out HOW to implement generics
- [ ] Create a standard library - [ ] Create a standard library
## Compiler Progress
<img src="assets/heatmap.png" alt="Progress heatmap" width="400">
- Yellow: needs to be completed for the MVP
- Lime: ongoing progress in this area
- Green: already completed

BIN
assets/heatmap.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 119 KiB

View File

@@ -242,9 +242,15 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
err = lexer.nextRune() err = lexer.nextRune()
case '!': case '!':
token := lexer.newToken() token := lexer.newToken()
token.kind = TokenKindExclamation
lexer.addToken(token)
err = lexer.nextRune() err = lexer.nextRune()
if err != nil { return }
token.kind = TokenKindExclamation
if lexer.char == '=' {
token.kind = TokenKindNotEqualTo
err = lexer.nextRune()
token.location.SetWidth(2)
}
lexer.addToken(token)
case '%': case '%':
token := lexer.newToken() token := lexer.newToken()
token.kind = TokenKindPercent token.kind = TokenKindPercent
@@ -255,6 +261,11 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
token.kind = TokenKindTilde token.kind = TokenKindTilde
lexer.addToken(token) lexer.addToken(token)
err = lexer.nextRune() err = lexer.nextRune()
case '=':
token := lexer.newToken()
token.kind = TokenKindEqualTo
lexer.addToken(token)
err = lexer.nextRune()
case '<': case '<':
token := lexer.newToken() token := lexer.newToken()
err = lexer.nextRune() err = lexer.nextRune()
@@ -264,6 +275,10 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
token.kind = TokenKindLShift token.kind = TokenKindLShift
err = lexer.nextRune() err = lexer.nextRune()
token.location.SetWidth(2) token.location.SetWidth(2)
} else if lexer.char == '=' {
token.kind = TokenKindLessThanEqualTo
err = lexer.nextRune()
token.location.SetWidth(2)
} }
lexer.addToken(token) lexer.addToken(token)
case '>': case '>':
@@ -275,6 +290,10 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
token.kind = TokenKindRShift token.kind = TokenKindRShift
err = lexer.nextRune() err = lexer.nextRune()
token.location.SetWidth(2) token.location.SetWidth(2)
} else if lexer.char == '=' {
token.kind = TokenKindGreaterThanEqualTo
err = lexer.nextRune()
token.location.SetWidth(2)
} }
lexer.addToken(token) lexer.addToken(token)
case '|': case '|':

View File

@@ -153,9 +153,13 @@ func TestTokenizeAll (test *testing.T) {
quickToken(1, TokenKindExclamation, nil), quickToken(1, TokenKindExclamation, nil),
quickToken(1, TokenKindPercent, nil), quickToken(1, TokenKindPercent, nil),
quickToken(1, TokenKindTilde, nil), quickToken(1, TokenKindTilde, nil),
quickToken(1, TokenKindEqualTo, nil),
quickToken(2, TokenKindNotEqualTo, nil),
quickToken(1, TokenKindLessThan, nil), quickToken(1, TokenKindLessThan, nil),
quickToken(2, TokenKindLessThanEqualTo, nil),
quickToken(2, TokenKindLShift, nil), quickToken(2, TokenKindLShift, nil),
quickToken(1, TokenKindGreaterThan, nil), quickToken(1, TokenKindGreaterThan, nil),
quickToken(2, TokenKindGreaterThanEqualTo, nil),
quickToken(2, TokenKindRShift, nil), quickToken(2, TokenKindRShift, nil),
quickToken(1, TokenKindBinaryOr, nil), quickToken(1, TokenKindBinaryOr, nil),
quickToken(2, TokenKindLogicalOr, nil), quickToken(2, TokenKindLogicalOr, nil),

View File

@@ -45,11 +45,13 @@ const (
TokenKindPercent TokenKindPercent
TokenKindTilde TokenKindTilde
// TODO: add equal to, less than or equal to, greater than or equal to, TokenKindEqualTo
// not equal to TokenKindNotEqualTo
TokenKindLessThanEqualTo
TokenKindLessThan TokenKindLessThan
TokenKindLShift TokenKindLShift
TokenKindGreaterThan TokenKindGreaterThan
TokenKindGreaterThanEqualTo
TokenKindRShift TokenKindRShift
TokenKindBinaryOr TokenKindBinaryOr
TokenKindLogicalOr TokenKindLogicalOr
@@ -175,12 +177,20 @@ func (tokenKind TokenKind) Describe () (description string) {
description = "Percent" description = "Percent"
case TokenKindTilde: case TokenKindTilde:
description = "Tilde" description = "Tilde"
case TokenKindEqualTo:
description = "EqualTo"
case TokenKindNotEqualTo:
description = "NotEqualTo"
case TokenKindLessThan: case TokenKindLessThan:
description = "LessThan" description = "LessThan"
case TokenKindLessThanEqualTo:
description = "LessThanEqualTo"
case TokenKindLShift: case TokenKindLShift:
description = "LShift" description = "LShift"
case TokenKindGreaterThan: case TokenKindGreaterThan:
description = "GreaterThan" description = "GreaterThan"
case TokenKindGreaterThanEqualTo:
description = "GreaterThanEqualTo"
case TokenKindRShift: case TokenKindRShift:
description = "RShift" description = "RShift"
case TokenKindBinaryOr: case TokenKindBinaryOr:

View File

@@ -289,8 +289,6 @@ func (parser *ParsingOperation) parseIdentifier () (
identifier.location = parser.token.Location() identifier.location = parser.token.Location()
for { for {
// TODO: eat up newlines and tabs after the dot, but not before
// it.
if !parser.token.Is(lexer.TokenKindName) { break } if !parser.token.Is(lexer.TokenKindName) { break }
identifier.trail = append ( identifier.trail = append (
@@ -301,6 +299,18 @@ func (parser *ParsingOperation) parseIdentifier () (
if err != nil { return } if err != nil { return }
if !parser.token.Is(lexer.TokenKindDot) { break } if !parser.token.Is(lexer.TokenKindDot) { break }
err = parser.nextToken()
if err != nil { return }
// allow the identifier to continue on to the next line if there
// is a line break right after the dot
for parser.token.Is(lexer.TokenKindNewline) ||
parser.token.Is(lexer.TokenKindIndent) {
err = parser.nextToken()
if err != nil { return }
}
} }
return return

View File

@@ -31,7 +31,7 @@ data ro nestedObject:Obj
.this .this
.bird0 324 .bird0 324
.bird1 "hello world" .bird1 "hello world"
data ro object:Obj data ro object:thing.thing.thing.thing
.that 2139 .that 2139
.this 324 .this 324
`, test) `, test)

View File

@@ -1,3 +1,3 @@
:arf :arf
--- rw -> -349820394 932748397 239485.37520 "hello world!\n" 'E' helloWorld:.,..[]{} --- rw -> -349820394 932748397 239485.37520 "hello world!\n" 'E' helloWorld:.,..[]{}
+ - ++ -- * / @ ! % ~ < << > >> | || & && + - ++ -- * / @ ! % ~ = != < <= << > >= >> | || & &&

View File

@@ -22,7 +22,9 @@ data ro integerArrayInitialized:{Int 16}
# data wr mutIntegerPointerInit:{Int}:mut [& integer] # data wr mutIntegerPointerInit:{Int}:mut [& integer]
data ro object:Obj # TODO: maybe test identifiers somewhere else?
data ro object:thing.thing.
thing.thing
.this 324 .this 324
.that 2139 .that 2139