Merge pull request 'remove-rune-literal' (#8) from remove-rune-literal into main
Reviewed-on: arf/arf#8
commit 48b53e48f3
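
Not part of the original patch: a minimal usage sketch, for context only, of how the lexer behaves after this merge — single-quoted text now lexes as a TokenKindString token, and TokenKindRune is gone. The import paths and the exported file.Open, Tokenize, and Describe helpers are assumptions taken from the tests changed below; verify them against the repository before relying on this.

package main

import "fmt"
import "git.tebibyte.media/arf/arf/file"  // assumed import path of the file package
import "git.tebibyte.media/arf/arf/lexer" // assumed import path of the lexer package

func main () {
	// open an arf source file the same way the lexer tests do
	sourceFile, err := file.Open("main.arf")
	if err != nil {
		fmt.Println(err)
		return
	}

	// tokenize it. after this change, text wrapped in single quotes
	// produces a TokenKindString token; there is no TokenKindRune.
	tokens, err := lexer.Tokenize(sourceFile)
	if err != nil {
		fmt.Println(err)
		return
	}

	for index, token := range tokens {
		fmt.Println(index, token.Describe())
	}
}
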
@@ -16,7 +16,6 @@ type Argument interface {
 	// UIntLiteral
 	// FloatLiteral
 	// StringLiteral
-	// RuneLiteral
 
 	ToString (indent int) (output string)
 	canBePassedAs (what Type) (allowed bool)
@@ -62,9 +61,6 @@ func (analyzer AnalysisOperation) analyzeArgument (
 
 	case parser.ArgumentKindString:
 		outputArgument = StringLiteral(inputArgument.Value().(string))
-
-	case parser.ArgumentKindRune:
-		outputArgument = RuneLiteral(inputArgument.Value().(rune))
 	}
 	return
 }
@@ -19,7 +19,7 @@ var PrimitiveFace = createPrimitive("Face", Type {})
 var PrimitiveFunc = createPrimitive("Func", Type {})
 
 var BuiltInString = createPrimitive("String", Type {
-	actual: PrimitiveU32,
+	actual: &PrimitiveU32,
 	kind: TypeKindVariableArray,
 })
 
@@ -167,10 +167,8 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
 		token.kind = TokenKindNewline
 		lexer.addToken(token)
 		err = lexer.nextRune()
-	case '"':
-		err = lexer.tokenizeString(false)
 	case '\'':
-		err = lexer.tokenizeString(true)
+		err = lexer.tokenizeString()
 	case ':':
 		token := lexer.newToken()
 		token.kind = TokenKindColon
@@ -73,7 +73,7 @@ func compareErr (
 	correctWidth int,
 	test *testing.T,
 ) {
-	test.Log("testing errors in", filePath)
+	test.Log("testing error in", filePath)
 	file, err := file.Open(filePath)
 	if err != nil {
 		test.Log(err)
@@ -81,12 +81,28 @@ func compareErr (
 		return
 	}
 
-	_, err = Tokenize(file)
-	check := err.(infoerr.Error)
+	tokens, err := Tokenize(file)
+	check, isCorrectType := err.(infoerr.Error)
 
+	for index, token := range tokens {
+		test.Log(index, "\tgot token:", token.Describe())
+	}
+
+	if err == nil {
+		test.Log("no error was recieved, test failed.")
+		test.Fail()
+		return
+	}
+
 	test.Log("error that was recieved:")
 	test.Log(check)
 
+	if !isCorrectType {
+		test.Log("error is not infoerr.Error, something has gone wrong.")
+		test.Fail()
+		return
+	}
+
 	if check.Kind() != correctKind {
 		test.Log("mismatched error kind")
 		test.Log("- want:", correctKind)
@@ -132,7 +148,7 @@ func TestTokenizeAll (test *testing.T) {
 		quickToken(9, TokenKindUInt, uint64(932748397)),
 		quickToken(12, TokenKindFloat, 239485.37520),
 		quickToken(16, TokenKindString, "hello world!\n"),
-		quickToken(3, TokenKindRune, 'E'),
+		quickToken(3, TokenKindString, "E"),
 		quickToken(10, TokenKindName, "helloWorld"),
 		quickToken(1, TokenKindColon, nil),
 		quickToken(1, TokenKindDot, nil),
@@ -215,18 +231,17 @@ func TestTokenizeNumbers (test *testing.T) {
 
 func TestTokenizeText (test *testing.T) {
 	checkTokenSlice("../tests/lexer/text.arf", test,
-		quickToken(34, TokenKindString, "hello world!\a\b\f\n\r\t\v'\"\\"),
+		quickToken(32, TokenKindString, "hello world!\a\b\f\n\r\t\v'\\"),
 		quickToken(1, TokenKindNewline, nil),
-		quickToken(4, TokenKindRune, '\a'),
-		quickToken(4, TokenKindRune, '\b'),
-		quickToken(4, TokenKindRune, '\f'),
-		quickToken(4, TokenKindRune, '\n'),
-		quickToken(4, TokenKindRune, '\r'),
-		quickToken(4, TokenKindRune, '\t'),
-		quickToken(4, TokenKindRune, '\v'),
-		quickToken(4, TokenKindRune, '\''),
-		quickToken(4, TokenKindRune, '"' ),
-		quickToken(4, TokenKindRune, '\\'),
+		quickToken(4, TokenKindString, "\a"),
+		quickToken(4, TokenKindString, "\b"),
+		quickToken(4, TokenKindString, "\f"),
+		quickToken(4, TokenKindString, "\n"),
+		quickToken(4, TokenKindString, "\r"),
+		quickToken(4, TokenKindString, "\t"),
+		quickToken(4, TokenKindString, "\v"),
+		quickToken(4, TokenKindString, "'"),
+		quickToken(4, TokenKindString, "\\"),
 		quickToken(1, TokenKindNewline, nil),
 		quickToken(35, TokenKindString, "hello world \x40\u0040\U00000040!"),
 		quickToken(1, TokenKindNewline, nil),
@@ -251,21 +266,16 @@ func TestTokenizeIndent (test *testing.T) {
 	)
 }
 
-func TestTokenizeErr (test *testing.T) {
+func TestTokenizeErrUnexpectedSymbol (test *testing.T) {
 	compareErr (
 		"../tests/lexer/error/unexpectedSymbol.arf",
 		infoerr.ErrorKindError,
 		"unexpected symbol character ;",
 		1, 5, 1,
 		test)
-
-	compareErr (
-		"../tests/lexer/error/excessDataRune.arf",
-		infoerr.ErrorKindError,
-		"excess data in rune literal",
-		1, 1, 7,
-		test)
-
+}
+
+func TestTokenizeErrUnknownEscape (test *testing.T) {
 	compareErr (
 		"../tests/lexer/error/unknownEscape.arf",
 		infoerr.ErrorKindError,
@@ -4,7 +4,7 @@ import "strconv"
 import "git.tebibyte.media/arf/arf/infoerr"
 
 // tokenizeString tokenizes a string or rune literal.
-func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
+func (lexer *LexingOperation) tokenizeString () (err error) {
 	err = lexer.nextRune()
 	if err != nil { return }
 
@@ -12,7 +12,6 @@ func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
 	got := ""
 	tokenWidth := 2
 
-	beginning := lexer.file.Location(1)
 	for {
 		if lexer.char == '\\' {
 			err = lexer.nextRune()
@@ -34,32 +33,14 @@ func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {
 			if err != nil { return }
 		}
 
-		if isRuneLiteral {
-			if lexer.char == '\'' { break }
-		} else {
-			if lexer.char == '"' { break }
-		}
+		if lexer.char == '\'' { break }
 	}
 
 	err = lexer.nextRune()
 	if err != nil { return }
 
-	beginning.SetWidth(len(got))
-	if isRuneLiteral {
-		if len(got) > 1 {
-			err = infoerr.NewError (
-				beginning,
-				"excess data in rune literal",
-				infoerr.ErrorKindError)
-			return
-		}
-
-		token.kind = TokenKindRune
-		token.value = rune([]rune(got)[0])
-	} else {
-		token.kind = TokenKindString
-		token.value = got
-	}
+	token.kind = TokenKindString
+	token.value = got
 
 	token.location.SetWidth(tokenWidth)
 	lexer.addToken(token)
@@ -77,7 +58,6 @@ var escapeSequenceMap = map[rune] rune {
 	't': '\x09',
 	'v': '\x0b',
 	'\'': '\'',
-	'"': '"',
 	'\\': '\\',
 }
 
@@ -19,7 +19,6 @@ const (
 	TokenKindUInt
 	TokenKindFloat
 	TokenKindString
-	TokenKindRune
 
 	TokenKindName
 
@@ -156,8 +155,6 @@ func (tokenKind TokenKind) Describe () (description string) {
 		description = "Float"
 	case TokenKindString:
 		description = "String"
-	case TokenKindRune:
-		description = "Rune"
 	case TokenKindName:
 		description = "Name"
 	case TokenKindColon:
@@ -12,7 +12,6 @@ var validArgumentStartTokens = []lexer.TokenKind {
 	lexer.TokenKindUInt,
 	lexer.TokenKindFloat,
 	lexer.TokenKindString,
-	lexer.TokenKindRune,
 
 	lexer.TokenKindLBracket,
 	lexer.TokenKindLParen,
@@ -78,11 +77,6 @@ func (parser *ParsingOperation) parseArgument () (argument Argument, err error)
 		argument.value = parser.token.Value().(string)
 		parser.nextToken()
 
-	case lexer.TokenKindRune:
-		argument.kind = ArgumentKindRune
-		argument.value = parser.token.Value().(rune)
-		parser.nextToken()
-
 	case lexer.TokenKindLBracket:
 		argument.kind = ArgumentKindPhrase
 		argument.value, err = parser.parseArgumentLevelPhrase()
@@ -25,7 +25,7 @@ func ro cBasicPhrases
 	[fn [gn 329 983 57] 123]
 func ro dArgumentTypes
 	---
-	[bird tree butterfly.wing "hello world" grass:Int:8:mut]
+	[bird tree butterfly.wing 'hello world' grass:Int:8:mut]
 func ro eMath
 	> x:Int
 	> y:Int
@@ -8,11 +8,11 @@ func TestMeta (test *testing.T) {
 	cwd, _ := os.Getwd()
 	checkTree ("../tests/parser/meta", false,
 `:arf
-author "Sasha Koshka"
-license "GPLv3"
-require "` + filepath.Join(cwd, "./some/local/module") + `"
-require "/usr/local/include/arf/someLibraryInstalledInStandardLocation"
-require "/some/absolute/path/to/someModule"
+author 'Sasha Koshka'
+license 'GPLv3'
+require '` + filepath.Join(cwd, "./some/local/module") + `'
+require '/usr/local/include/arf/someLibraryInstalledInStandardLocation'
+require '/some/absolute/path/to/someModule'
 ---
 `, test)
 }
@@ -34,16 +34,16 @@ func (tree SyntaxTree) ToString (indent int) (output string) {
 	output += doIndent(indent, ":arf\n")
 
 	if tree.author != "" {
-		output += doIndent(indent, "author \"", tree.author, "\"\n")
+		output += doIndent(indent, "author '", tree.author, "'\n")
 	}
 
 	if tree.license != "" {
-		output += doIndent(indent, "license \"", tree.license, "\"\n")
+		output += doIndent(indent, "license '", tree.license, "'\n")
 	}
 
 	for _, name := range sortMapKeysAlphabetically(tree.requires) {
 		require := tree.requires[name]
-		output += doIndent(indent, "require \"", require, "\"\n")
+		output += doIndent(indent, "require '", require, "'\n")
 	}
 
 	output += doIndent(indent, "---\n")
@@ -153,13 +153,7 @@ func (argument Argument) ToString (indent int, breakLine bool) (output string) {
 	case ArgumentKindString:
 		output += doIndent (
 			indent,
-			"\"" + argument.value.(string) + "\"")
-		if breakLine { output += "\n" }
-
-	case ArgumentKindRune:
-		output += doIndent (
-			indent,
-			"'" + string(argument.value.(rune)) + "'")
+			"'" + argument.value.(string) + "'")
 		if breakLine { output += "\n" }
 	}
 
@@ -123,11 +123,8 @@ const (
 	// 0.44
 	ArgumentKindFloat
 
-	// "hello world"
+	// 'hello world'
 	ArgumentKindString
-
-	// 'S'
-	ArgumentKindRune
 )
 
 // Argument represents a value that can be placed anywhere a value goes. This
@@ -13,7 +13,7 @@ type ro bBitFields:Obj
 	ro that:Int & 1
 	ro this:Int 298 & 24
 type ro cInit:Obj
-	ro that:String "hello world"
+	ro that:String 'hello world'
 	ro this:Int 23
 type ro dInitInherit:aBasic
 	ro that 9384
@@ -24,7 +24,7 @@ type ro eInitAndDefine:aBasic
 	ro born:Int 4
 	ro in:Int
 	ro the:Int:3 (9348 92384 92834)
-	ro walls:String "live in the walls, die in the walls."
+	ro walls:String 'live in the walls, die in the walls.'
 type ro fBasic:Int
 type ro gBasicInit:Int
 	6
@@ -1,3 +1,3 @@
 :arf
---- rw -> -349820394 932748397 239485.37520 "hello world!\n" 'E' helloWorld:.,..()[]{}
+--- rw -> -349820394 932748397 239485.37520 'hello world!\n' 'E' helloWorld:.,..()[]{}
 + - ++ -- * / @ ! % %= ~ ~= = == != < <= << <<= > >= >> >>= | |= || & &= && ^ ^=
@@ -1,2 +0,0 @@
-:arf
-'aaaaaaa'
@@ -1,2 +1,2 @@
 :arf
-"\g"
+'\g'
@@ -1,4 +1,4 @@
 :arf
-"hello world!\a\b\f\n\r\t\v\'\"\\"
-'\a' '\b' '\f' '\n' '\r' '\t' '\v' '\'' '\"' '\\'
-"hello world \x40\u0040\U00000040!"
+'hello world!\a\b\f\n\r\t\v\'\\'
+'\a' '\b' '\f' '\n' '\r' '\t' '\v' '\'' '\\'
+'hello world \x40\u0040\U00000040!'
@@ -5,11 +5,11 @@ require "io"
 ---
 
 # this is a global variable
-data pv helloText:String "Hello, world!"
+data pv helloText:String 'Hello, world!'
 
 # this is a struct definition
 objt ro Greeter:Obj
-	rw text:String "Hi."
+	rw text:String 'Hi.'
 
 # this is a function
 func ro main
@@ -27,7 +27,7 @@ func ro cBasicPhrases
 
 func ro dArgumentTypes
 	---
-	[bird tree butterfly.wing "hello world"
+	[bird tree butterfly.wing 'hello world'
 	grass:Int:mut:8]
 
 func ro eMath
@@ -1,7 +1,7 @@
 :arf
-author "Sasha Koshka"
-license "GPLv3"
-require "./some/local/module"
-require "/some/absolute/path/to/someModule"
-require "someLibraryInstalledInStandardLocation"
+author 'Sasha Koshka'
+license 'GPLv3'
+require './some/local/module'
+require '/some/absolute/path/to/someModule'
+require 'someLibraryInstalledInStandardLocation'
 ---
@@ -7,7 +7,7 @@ data ro aExternalData:Int
 data ro bSingleValue:Int 342
 
 data ro cNestedObject:Obj (
-	(324 "hello world")
+	(324 'hello world')
 	(123.8439 9328.21348239)
 )
 
@@ -9,7 +9,7 @@ type ro bBitFields:Obj
 	ro this:Int 298 & 24
 
 type ro cInit:Obj
-	ro that:String "hello world"
+	ro that:String 'hello world'
 	ro this:Int 23
 
 # the semantic analyzer should let these sections restrict the permissions of
@@ -24,7 +24,7 @@ type ro eInitAndDefine:aBasic
 	ro born:Int 4
 	ro in:Int
 	ro the:Int:3 (9348 92384 92834)
-	ro walls:String "live in the walls, die in the walls."
+	ro walls:String 'live in the walls, die in the walls.'
 
 type ro fBasic:Int
 