commit
e069569c3c
@@ -0,0 +1,91 @@
package parser

import "git.tebibyte.media/sashakoshka/arf/file"
import "git.tebibyte.media/sashakoshka/arf/lexer"

var validArgumentStartTokens = []lexer.TokenKind {
	lexer.TokenKindName,

	lexer.TokenKindInt,
	lexer.TokenKindUInt,
	lexer.TokenKindFloat,
	lexer.TokenKindString,
	lexer.TokenKindRune,

	lexer.TokenKindLBrace,
	lexer.TokenKindLBracket,
}
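
// parseArgument parses a single argument. This can be an identifier, a
// declaration of the form name:Type, or an int, uint, float, string, or rune
// literal.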
func (parser *ParsingOperation) parseArgument () (argument Argument, err error) {
	argument.location = parser.token.Location()

	err = parser.expect(validArgumentStartTokens...)
	if err != nil { return }

	switch parser.token.Kind() {
	case lexer.TokenKindName:
		var identifier Identifier
		identifier, err = parser.parseIdentifier()
		if err != nil { return }

		if parser.token.Is(lexer.TokenKindColon) {
			var what Type
			what, err = parser.parseType()
			if err != nil { return }

			if len(identifier.trail) != 1 {
				err = parser.token.NewError (
					"cannot use member selection in " +
					"a variable definition",
					file.ErrorKindError)
				return
			}

			argument.kind = ArgumentKindDeclaration
			argument.value = Declaration {
				location: argument.location,
				name: identifier.trail[0],
				what: what,
			}
		} else {
			argument.kind = ArgumentKindIdentifier
			argument.value = identifier
		}

	case lexer.TokenKindInt:
		argument.kind = ArgumentKindInt
		argument.value = parser.token.Value().(int64)
		err = parser.nextToken()

	case lexer.TokenKindUInt:
		argument.kind = ArgumentKindUInt
		argument.value = parser.token.Value().(uint64)
		err = parser.nextToken()

	case lexer.TokenKindFloat:
		argument.kind = ArgumentKindFloat
		argument.value = parser.token.Value().(float64)
		err = parser.nextToken()

	case lexer.TokenKindString:
		argument.kind = ArgumentKindString
		argument.value = parser.token.Value().(string)
		err = parser.nextToken()

	case lexer.TokenKindRune:
		argument.kind = ArgumentKindRune
		argument.value = parser.token.Value().(rune)
		err = parser.nextToken()

	// case lexer.TokenKindLBrace:

	// case lexer.TokenKindLBracket:

	default:
		panic (
			"unimplemented argument kind " +
			parser.token.Kind().Describe())
	}

	return
}

@@ -1,18 +1,34 @@
package parser

import "git.tebibyte.media/sashakoshka/arf/file"
import "git.tebibyte.media/sashakoshka/arf/lexer"

// parse body parses the body of an arf file, after the metadata header.
func (parser *ParsingOperation) parseBody () (err error) {
	err = parser.nextToken(lexer.TokenKindName)
	if err != nil { return }
	for {
		err = parser.expect(lexer.TokenKindName)
		if err != nil { return }

-		switch parser.token.Value().(string) {
-		case "data":
-		case "type":
-		case "func":
-		case "face":
+		sectionType := parser.token.Value().(string)
+		switch sectionType {
+		case "data":
+			var section *DataSection
+			section, err = parser.parseDataSection()
+			if parser.tree.dataSections == nil {
+				parser.tree.dataSections =
+					make(map[string] *DataSection)
+			}
+			parser.tree.dataSections[section.name] = section
+			if err != nil { return }
+		case "type":
+		case "face":
+		case "enum":
+		case "func":
+		default:
+			err = parser.token.NewError (
+				"unknown section type \"" + sectionType + "\"",
+				file.ErrorKindError)
+			return
+		}
	}

	return
}

@@ -1,6 +1,312 @@
package parser

-// parseData parses a data section
-func (parser *ParsingOperation) parseData () (err error) {
import "git.tebibyte.media/sashakoshka/arf/file"
|
||||
import "git.tebibyte.media/sashakoshka/arf/types"
|
||||
import "git.tebibyte.media/sashakoshka/arf/lexer"
|
||||
|
||||
// parseData parses a data section.
|
||||
func (parser *ParsingOperation) parseDataSection () (
|
||||
section *DataSection,
|
||||
err error,
|
||||
) {
|
||||
err = parser.expect(lexer.TokenKindName)
|
||||
if err != nil { return }
|
||||
|
||||
section = &DataSection { location: parser.token.Location() }
|
||||
|
||||
err = parser.nextToken(lexer.TokenKindPermission)
|
||||
if err != nil { return }
|
||||
section.permission = parser.token.Value().(types.Permission)
|
||||
|
||||
err = parser.nextToken(lexer.TokenKindName)
|
||||
if err != nil { return }
|
||||
section.name = parser.token.Value().(string)
|
||||
|
||||
err = parser.nextToken(lexer.TokenKindColon)
|
||||
if err != nil { return }
|
||||
err = parser.nextToken()
|
||||
if err != nil { return }
|
||||
section.what, err = parser.parseType()
|
||||
if err != nil { return }
|
||||
|
||||
if parser.token.Is(lexer.TokenKindNewline) {
|
||||
err = parser.nextToken()
|
||||
if err != nil { return }
|
||||
|
||||
section.value, err = parser.parseInitializationValues(0)
|
||||
if err != nil { return }
|
||||
} else {
|
||||
section.value, err = parser.parseArgument()
|
||||
if err != nil { return }
|
||||
|
||||
err = parser.expect(lexer.TokenKindNewline)
|
||||
if err != nil { return }
|
||||
err = parser.nextToken()
|
||||
if err != nil { return }
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
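// For reference, the expected output in this commit's parser test renders the
// data sections handled here like this:
//
//	data wr integer:Int 3202
//
//	data wr object:Obj
//		.that 2139
//		.this 324
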
// parseInitializationValues starts on the line after a data section, or a set
// phrase. It checks for an indent greater than the indent of the aforementioned
// data section or set phrase (passed through baseIndent), and if there is,
// it parses initialization values.
func (parser *ParsingOperation) parseInitializationValues (
	baseIndent int,
) (
	initializationArgument Argument,
	err error,
) {
	// check if line is indented one more than baseIndent
	if !parser.token.Is(lexer.TokenKindIndent) { return }
	if parser.token.Value().(int) != baseIndent + 1 { return }

	initializationArgument.location = parser.token.Location()

	err = parser.nextToken()
	if err != nil { return }

	if parser.token.Is(lexer.TokenKindDot) {

		// object initialization
		parser.previousToken()
		var initializationValues ObjectInitializationValues
		initializationValues, err = parser.parseObjectInitializationValues()
		initializationArgument.kind = ArgumentKindObjectInitializationValues
		initializationArgument.value = &initializationValues

	} else {

		// array initialization
		parser.previousToken()
		var initializationValues ArrayInitializationValues
		initializationValues, err = parser.parseArrayInitializationValues()
		initializationArgument.kind = ArgumentKindArrayInitializationValues
		initializationArgument.value = &initializationValues
	}

	return
}

// parseObjectInitializationValues parses a list of object initialization
// values until the indentation level drops.
func (parser *ParsingOperation) parseObjectInitializationValues () (
	initializationValues ObjectInitializationValues,
	err error,
) {
	println("BEGIN")
	defer println("END")

	initializationValues.attributes = make(map[string] Argument)

	baseIndent := 0
	begin := true

	for {
		// if there is no indent we can just stop parsing
		if !parser.token.Is(lexer.TokenKindIndent) { break }
		indent := parser.token.Value().(int)

		if begin == true {
			initializationValues.location = parser.token.Location()
			baseIndent = indent
			begin = false
		}

		// do not parse any further if the indent has changed
		if indent != baseIndent { break }

		println("HIT")

		// move on to the beginning of the line, which must contain
		// a member initialization value
		err = parser.nextToken(lexer.TokenKindDot)
		if err != nil { return }
		err = parser.nextToken(lexer.TokenKindName)
		if err != nil { return }
		name := parser.token.Value().(string)

		// if the member has already been listed, throw an error
		_, exists := initializationValues.attributes[name]
		if exists {
			err = parser.token.NewError (
				"duplicate member \"" + name + "\" in object " +
				"member initialization",
				file.ErrorKindError)
			return
		}

		// parse the argument determining the member initialization
		// value
		err = parser.nextToken()
		if err != nil { return }
		var value Argument
		if parser.token.Is(lexer.TokenKindNewline) {

			// recurse
			err = parser.nextToken(lexer.TokenKindIndent)
			if err != nil { return }

			value, err = parser.parseInitializationValues(baseIndent)
			initializationValues.attributes[name] = value
			if err != nil { return }

		} else {

			// parse as normal argument
			value, err = parser.parseArgument()
			initializationValues.attributes[name] = value
			if err != nil { return }

			err = parser.expect(lexer.TokenKindNewline)
			if err != nil { return }
			err = parser.nextToken()
			if err != nil { return }
		}
	}

	return
}

// parseArrayInitializationValues parses a list of array initialization values
// until the indentation level drops.
func (parser *ParsingOperation) parseArrayInitializationValues () (
	initializationValues ArrayInitializationValues,
	err error,
) {
	baseIndent := 0
	begin := true

	for {
		// if there is no indent we can just stop parsing
		if !parser.token.Is(lexer.TokenKindIndent) { break }
		indent := parser.token.Value().(int)

		if begin == true {
			initializationValues.location = parser.token.Location()
			baseIndent = indent
			begin = false
		}

		// do not parse any further if the indent has changed
		if indent != baseIndent { break }

		// move on to the beginning of the line, which must contain
		// arguments
		err = parser.nextToken(validArgumentStartTokens...)
		if err != nil { return }

		for {
			// stop parsing this line and go on to the next if a
			// newline token is encountered
			if parser.token.Is(lexer.TokenKindNewline) {
				err = parser.nextToken()
				if err != nil { return }
				break
			}

			// otherwise, parse the argument
			var argument Argument
			argument, err = parser.parseArgument()
			if err != nil { return }
			initializationValues.values = append (
				initializationValues.values,
				argument)
		}
	}

	return
}

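// The type notations appearing in the parser test in this commit include, for
// example: Int, {Int}, {Int 16}, {Int ..}, Int:mut, and {Int}:mut.
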
// parseType parses a type notation of the form Name, {Name}, etc.
func (parser *ParsingOperation) parseType () (what Type, err error) {
	err = parser.expect(lexer.TokenKindName, lexer.TokenKindLBrace)
	if err != nil { return }
	what.location = parser.token.Location()

	if parser.token.Is(lexer.TokenKindLBrace) {
		what.kind = TypeKindPointer

		err = parser.nextToken()
		if err != nil { return }

		var points Type
		points, err = parser.parseType()
		if err != nil { return }
		what.points = &points

		err = parser.expect (
			lexer.TokenKindUInt,
			lexer.TokenKindRBrace,
			lexer.TokenKindElipsis)
		if err != nil { return }

		if parser.token.Is(lexer.TokenKindUInt) {
			what.kind = TypeKindArray

			what.length = parser.token.Value().(uint64)

			err = parser.nextToken(lexer.TokenKindRBrace)
			if err != nil { return }
		} else if parser.token.Is(lexer.TokenKindElipsis) {
			what.kind = TypeKindArray

			err = parser.nextToken(lexer.TokenKindRBrace)
			if err != nil { return }
		}

		err = parser.nextToken()
		if err != nil { return }
	} else {
		what.name, err = parser.parseIdentifier()
		if err != nil { return }
	}

	if parser.token.Is(lexer.TokenKindColon) {
		err = parser.nextToken(lexer.TokenKindName)
		if err != nil { return }

		qualifier := parser.token.Value().(string)
		switch qualifier {
		case "mut":
			what.mutable = true
		default:
			err = parser.token.NewError (
				"unknown type qualifier \"" + qualifier + "\"",
				file.ErrorKindError)
			return
		}

		err = parser.nextToken()
		if err != nil { return }
	}

	return
}

// parseIdentifier parses an identifier made out of dot separated names.
func (parser *ParsingOperation) parseIdentifier () (
	identifier Identifier,
	err error,
) {
	err = parser.expect(lexer.TokenKindName)
	if err != nil { return }
	identifier.location = parser.token.Location()

	for {
		// TODO: eat up newlines and tabs after the dot, but not before
		// it.
		if !parser.token.Is(lexer.TokenKindName) { break }

		identifier.trail = append (
			identifier.trail,
			parser.token.Value().(string))

		err = parser.nextToken()
		if err != nil { return }

		if !parser.token.Is(lexer.TokenKindDot) { break }
	}

	return
}

@@ -1,33 +1,75 @@
package parser

-import "reflect"
+import "io"
import "testing"
// import "git.tebibyte.media/sashakoshka/arf/types"

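// checkTree parses the module at modulePath and fails the test if the
// resulting syntax tree, rendered by ToString, does not match correct.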
-func checkTree (modulePath string, correct *SyntaxTree, test *testing.T) {
+func checkTree (modulePath string, correct string, test *testing.T) {
	tree, err := Parse(modulePath)
+	treeString := tree.ToString(0)

-	if err != nil {
+	test.Log("CORRECT TREE:")
+	test.Log(correct)
+	test.Log("WHAT WAS PARSED:")
+	test.Log(treeString)
+
+	if err != io.EOF && err != nil {
		test.Log("returned error:")
		test.Log(err.Error())
		test.Fail()
		return
	}

-	if !reflect.DeepEqual(tree, correct) {
-		test.Log("trees not equal")
+	if treeString != correct {
+		test.Log("trees not equal!")
		test.Fail()
		return
	}
}

func TestMeta (test *testing.T) {
-	checkTree("../tests/parser/meta",&SyntaxTree {
-		license: "GPLv3",
-		author: "Sasha Koshka",
-
-		requires: []string {
-			"someModule",
-			"otherModule",
-		},
-	}, test)
+	checkTree ("../tests/parser/meta",
+`:arf
+author "Sasha Koshka"
+license "GPLv3"
+require "someModule"
+require "otherModule"
+---
+`, test)
}

+func TestData (test *testing.T) {
+	checkTree ("../tests/parser/data",
+`:arf
+---
+data wr integer:Int 3202
+data wr integerArray16:{Int 16}
+data wr integerArrayInitialized:{Int 16}
+	3948
+	293
+	293049
+	948
+	912
+	340
+	0
+	2304
+	0
+	4785
+	92
+data wr integerArrayVariable:{Int ..}
+data wr integerPointer:{Int}
+data wr mutInteger:Int:mut 3202
+data wr mutIntegerPointer:{Int}:mut
+data wr nestedObject:Obj
+	.that
+		.bird2 123.8439
+		.bird3 9328.21348239
+	.this
+		.bird0 324
+		.bird1 "hello world"
+data wr object:Obj
+	.that 2139
+	.this 324
+`, test)
+}

@@ -0,0 +1,248 @@
package parser

import "fmt"
import "sort"

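// doIndent returns the input strings concatenated together, prefixed with the
// given number of tab characters.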
func doIndent (indent int, input ...string) (output string) {
	for index := 0; index < indent; index ++ {
		output += "\t"
	}
	for _, inputSection := range input {
		output += inputSection
	}
	return
}

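// sortMapKeysAlphabetically returns the keys of the given string-keyed map,
// sorted alphabetically.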
func sortMapKeysAlphabetically[KEY_TYPE any] (
	unsortedMap map[string] KEY_TYPE,
) (
	sortedKeys []string,
) {
	sortedKeys = make([]string, len(unsortedMap))
	index := 0
	for key, _ := range unsortedMap {
		sortedKeys[index] = key
		index ++
	}
	sort.Strings(sortedKeys)

	return
}

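// ToString returns a textual representation of the syntax tree, indented by
// the given amount. The parser tests compare their results against this
// output.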
func (tree *SyntaxTree) ToString (indent int) (output string) {
	output += doIndent(indent, ":arf\n")

	if tree.author != "" {
		output += doIndent(indent, "author \"", tree.author, "\"\n")
	}

	if tree.license != "" {
		output += doIndent(indent, "license \"", tree.license, "\"\n")
	}

	for _, require := range tree.requires {
		output += doIndent(indent, "require \"", require, "\"\n")
	}

	output += doIndent(indent, "---\n")

	dataSectionKeys := sortMapKeysAlphabetically(tree.dataSections)
	for _, name := range dataSectionKeys {
		output += tree.dataSections[name].ToString(indent)
	}
	return
}

func (identifier *Identifier) ToString () (output string) {
	for index, trailItem := range identifier.trail {
		if index > 0 {
			output += "."
		}

		output += trailItem
	}
	return
}

func (what *Type) ToString () (output string) {
	if what.kind == TypeKindBasic {
		output += what.name.ToString()
	} else {
		output += "{"
		output += what.points.ToString()

		if what.kind == TypeKindArray {
			output += " "
			if what.length == 0 {
				output += ".."
			} else {
				output += fmt.Sprint(what.length)
			}
		}

		output += "}"
	}

	if what.mutable {
		output += ":mut"
	}

	return
}

func (declaration *Declaration) ToString () (output string) {
	output += declaration.name + ":"
	output += declaration.what.ToString()
	return
}

func (attributes *ObjectInitializationValues) ToString (
	indent int,
) (
	output string,
) {
	for _, name := range sortMapKeysAlphabetically(attributes.attributes) {
		value := attributes.attributes[name]

		output += doIndent(indent, ".", name, " ")
		if value.kind == ArgumentKindObjectInitializationValues {
			output += "\n"
			output += value.ToString(indent + 1, true)
		} else {
			output += value.ToString(0, false) + "\n"
		}
	}

	return
}

func (values *ArrayInitializationValues) ToString (
	indent int,
) (
	output string,
) {
	for _, value := range values.values {
		output += value.ToString(indent, true)
	}

	return
}

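// ToString returns the phrase as text, either on one line or, if breakLine is
// set, spread over multiple indented lines with one argument per line.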
func (phrase *Phrase) ToString (indent int, breakLine bool) (output string) {
	if breakLine {
		output += doIndent (
			indent,
			"[", phrase.command.ToString(0, false))
		output += "\n"
		for _, argument := range phrase.arguments {
			output += doIndent (
				indent,
				argument.ToString(indent + 1, true))
		}
	} else {
		output += "[" + phrase.command.ToString(0, false)
		for _, argument := range phrase.arguments {
			output += " " + argument.ToString(0, false)
		}
	}

	output += "]"

	if len(phrase.returnsTo) > 0 {
		output += " ->"
		for _, returnItem := range phrase.returnsTo {
			output += " " + returnItem.ToString(0, false)
		}
	}

	if breakLine {
		output += "\n"
	}
	return
}

func (argument *Argument) ToString (indent int, breakLine bool) (output string) {
	if !breakLine { indent = 0 }
	if argument.kind == ArgumentKindNil {
		output += "NIL-ARGUMENT"
		if breakLine { output += "\n" }
		return
	}

	switch argument.kind {
	case ArgumentKindPhrase:
		output += argument.value.(*Phrase).ToString (
			indent,
			breakLine)

	case ArgumentKindObjectInitializationValues:
		// this should only appear in contexts where breakLine is true
		output += argument.value.(*ObjectInitializationValues).
			ToString(indent)

	case ArgumentKindArrayInitializationValues:
		// this should only appear in contexts where breakLine is true
		output += argument.value.(*ArrayInitializationValues).
			ToString(indent)

	case ArgumentKindIdentifier:
		output += doIndent (
			indent,
			argument.value.(*Identifier).ToString())
		if breakLine { output += "\n" }

	case ArgumentKindDeclaration:
		output += doIndent (
			indent,
			argument.value.(*Declaration).ToString())
		if breakLine { output += "\n" }

	case ArgumentKindInt, ArgumentKindUInt, ArgumentKindFloat:
		output += doIndent(indent, fmt.Sprint(argument.value))
		if breakLine { output += "\n" }

	case ArgumentKindString:
		output += doIndent (
			indent,
			"\"" + argument.value.(string) + "\"")
		if breakLine { output += "\n" }

	case ArgumentKindRune:
		output += doIndent (
			indent,
			"'" + string(argument.value.(rune)) + "'")
		if breakLine { output += "\n" }

	case ArgumentKindOperator:
		// TODO
		// also when parsing this argument kind, don't do it in the
		// argument parsing function. do it specifically when parsing a
		// phrase command.
	}

	return
}

func (section *DataSection) ToString (indent int) (output string) {
	output += doIndent (
		indent,
		"data ",
		section.permission.ToString(), " ",
		section.name, ":",
		section.what.ToString())

	isComplexInitialization :=
		section.value.kind == ArgumentKindObjectInitializationValues ||
		section.value.kind == ArgumentKindArrayInitializationValues

	if section.value.value == nil {
		output += "\n"
	} else if isComplexInitialization {
		output += "\n"
		output += section.value.ToString(indent + 1, true)
	} else {
		output += " " + section.value.ToString(0, false)
		output += "\n"
	}
	return
}

@@ -1,3 +1,3 @@
:arf
---- rw -> -349820394 932748397 239485.37520 "hello world!\n" 'E' helloWorld:.[]{}
+--- rw -> -349820394 932748397 239485.37520 "hello world!\n" 'E' helloWorld:.,..[]{}
+ - ++ -- * / @ ! % ~ < << > >> | || & &&