ton of JSON testing

This commit is contained in:
Skye Terran 2021-08-23 15:08:23 -07:00
parent 6be68f74c6
commit 59c3d7561d
5 changed files with 434 additions and 44 deletions

NeutrinoJSONTest.ipynb (new file, 208 lines added)

@@ -0,0 +1,208 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 239,
"source": [
"import json\r\n",
"\r\n",
"# create a new scene graph\r\n",
"def new_scene(name):\r\n",
" # create empty neutrino data\r\n",
" data = {\r\n",
" \"graph\": {\r\n",
" \"scene\": {\r\n",
" \"meta\": {\r\n",
" \"name\": name\r\n",
" },\r\n",
" \"objects\": {}\r\n",
" },\r\n",
" \"assets\": {}\r\n",
" },\r\n",
" \"meta\": {\r\n",
" \"max_key\": 0\r\n",
" }\r\n",
" }\r\n",
"\r\n",
" # return that empty data\r\n",
" return data\r\n",
"\r\n",
"# write the data to a JSON file\r\n",
"def save_scene(data):\r\n",
" clean_data = {\r\n",
" \"graph\": data[\"graph\"]\r\n",
" }\r\n",
"\r\n",
" # include cache if relevant\r\n",
" if \"cache\" in data:\r\n",
" clean_data[\"cache\"] = data[\"cache\"]\r\n",
"\r\n",
" filename = data[\"graph\"][\"scene\"][\"meta\"][\"name\"].replace(\" \", \"\") + \".json\"\r\n",
" with open(filename, \"w\") as outfile:\r\n",
" json.dump(clean_data, outfile, indent = 4)\r\n",
"\r\n",
"# get a new indexed object key and increment the scene's max key\r\n",
"def add_key(data):\r\n",
" # get the indexed key\r\n",
" key = hex(data[\"meta\"][\"max_key\"] + 1)\r\n",
"\r\n",
" # index the max key\r\n",
" data[\"meta\"][\"max_key\"] += 1\r\n",
"\r\n",
" return key\r\n",
"\r\n",
"# add an asset to the graph\r\n",
"def add_asset(data, name):\r\n",
" asset_data = {\r\n",
" \"name\": {\"t\": \"name\", \"v\": name}\r\n",
" }\r\n",
" \r\n",
" # add the asset to the graph\r\n",
" data[\"graph\"][\"assets\"][add_key(data)] = asset_data\r\n",
"\r\n",
"# add an object to the scene\r\n",
"def spawn_object(data, name, asset):\r\n",
" object_data = {\r\n",
" \"name\": {\"t\": \"name\", \"v\": name},\r\n",
" \"asset\": {\"t\": \"asset\", \"v\": asset},\r\n",
" \"trans\": {\"t\": \"trans\", \"v\": [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]]}\r\n",
" }\r\n",
"\r\n",
" # get an asset key by the provided name\r\n",
" for key, value in data[\"graph\"][\"assets\"].items():\r\n",
" if value[\"name\"][\"v\"] == asset:\r\n",
" object_data[\"asset\"][\"v\"] = f\"*{key}\"\r\n",
"\r\n",
" # add the object to the scene\r\n",
" data[\"graph\"][\"scene\"][\"objects\"][add_key(data)] = object_data"
],
"outputs": [],
"metadata": {}
},
{
"cell_type": "code",
"execution_count": 240,
"source": [
"# cache the scene\r\n",
"def cache_scene(data):\r\n",
" # add the cache object to the scene data\r\n",
" data[\"cache\"] = {}\r\n",
"\r\n",
" containers = [\r\n",
" data[\"graph\"][\"scene\"][\"objects\"],\r\n",
" data[\"graph\"][\"assets\"]\r\n",
" ]\r\n",
"\r\n",
" for objects in containers:\r\n",
" # temp cache\r\n",
" hash_cache = {}\r\n",
"\r\n",
" # hash all values\r\n",
" for key, value in objects.items():\r\n",
" for key, value in value.items():\r\n",
" # ignore pointers\r\n",
" if type(value) == str:\r\n",
" is_pointer = value[0] == \"*\"\r\n",
" else:\r\n",
" is_pointer = False\r\n",
" if not is_pointer:\r\n",
" # convert into string and hash that\r\n",
" value_hash = hash(str(value))\r\n",
"\r\n",
" # track in temp cache\r\n",
" if value_hash not in hash_cache:\r\n",
" hash_cache[value_hash] = {\"value\": value, \"count\": 1}\r\n",
" else:\r\n",
" hash_cache[value_hash][\"count\"] += 1\r\n",
"\r\n",
" # throw out all non-repeated values\r\n",
" bad_keys = []\r\n",
" for key, value in hash_cache.items():\r\n",
" if value[\"count\"] < 2:\r\n",
" bad_keys.append(key)\r\n",
" for key in bad_keys:\r\n",
" del hash_cache[key]\r\n",
"\r\n",
" # create hash objects for each repeated value\r\n",
" for key, value in hash_cache.items():\r\n",
" cache_pointer = f\"#{add_key(data)}\"\r\n",
" data[\"cache\"][cache_pointer] = value[\"value\"]\r\n",
" hash_cache[key][\"pointer\"] = cache_pointer\r\n",
"\r\n",
" # replace all instances of cached values in the graph with corresponding cache pointers\r\n",
" for object_key, object_value in objects.items():\r\n",
" for value_key, value_value in object_value.items():\r\n",
" # ignore pointers\r\n",
" if type(value_value) == str:\r\n",
" is_pointer = value_value[0] == \"*\"\r\n",
" else:\r\n",
" is_pointer = False\r\n",
" if not is_pointer:\r\n",
" # convert into string and hash that\r\n",
" value_hash = hash(str(value_value))\r\n",
"\r\n",
" # if this value is cached, replace it with its cache pointer\r\n",
" if value_hash in hash_cache:\r\n",
" objects[object_key][value_key] = hash_cache[value_hash][\"pointer\"]"
],
"outputs": [],
"metadata": {}
},
{
"cell_type": "code",
"execution_count": 241,
"source": [
"# just returns a random string\r\n",
"import random\r\n",
"import string\r\n",
"def random_string(length):\r\n",
" return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(length))\r\n",
"\r\n",
"# create test scene\r\n",
"test_scene = new_scene(\"Neutrino Test Scene\")\r\n",
"\r\n",
"# populate assets\r\n",
"asset_names = []\r\n",
"for i in range(3):\r\n",
" name = random_string(8)\r\n",
" add_asset(test_scene, name)\r\n",
" asset_names.append(name)\r\n",
"\r\n",
"# populate objects in scene\r\n",
"for i in range(5):\r\n",
" spawn_object(test_scene, random_string(8), random.choice(asset_names))\r\n",
"\r\n",
"# cache the scene\r\n",
"cache_scene(test_scene)\r\n",
"\r\n",
"save_scene(test_scene)"
],
"outputs": [],
"metadata": {}
}
],
"metadata": {
"orig_nbformat": 4,
"language_info": {
"name": "python",
"version": "3.7.8",
"mimetype": "text/x-python",
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"pygments_lexer": "ipython3",
"nbconvert_exporter": "python",
"file_extension": ".py"
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3.7.8 64-bit"
},
"interpreter": {
"hash": "57baa5815c940fdaff4d14510622de9616cae602444507ba5d0b6727c008cbd6"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
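For reference, a minimal sketch of the reverse operation: loading a file written by save_scene above and expanding its "#" cache pointers back into full values. It walks the same two containers as cache_scene; the load_scene name is illustrative and not part of this commit.

import json

# illustrative helper (not in this commit): load a saved scene and expand "#" cache pointers in place
def load_scene(filename):
    with open(filename) as infile:
        data = json.load(infile)

    cache = data.get("cache", {})
    containers = [
        data["graph"]["scene"]["objects"],
        data["graph"]["assets"]
    ]

    for objects in containers:
        for object_value in objects.values():
            for value_key, value in object_value.items():
                # cache keys are stored with their "#" prefix, so a pointer is the cache key itself
                if isinstance(value, str) and value.startswith("#"):
                    object_value[value_key] = cache[value]
    return data

# e.g. scene = load_scene("NeutrinoTestScene.json")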

View File

@@ -2,42 +2,32 @@
 "cells": [
 {
 "cell_type": "code",
-"execution_count": 25,
+"execution_count": 86,
 "source": [
-"file = open(\"test.neu\")\r\n",
+"file = open(\"testB.neu\")\r\n",
 "dirty_blob = file.read()\r\n",
 "\r\n",
 "# get rid of comments and leading/trailing whitespace\r\n",
 "lines = dirty_blob.split(\"\\n\")\r\n",
 "for i, line in enumerate(lines):\r\n",
-" lines[i] = line.strip()\r\n",
 " if line.strip()[:2] == \"//\":\r\n",
 " del lines[i]\r\n",
 "\r\n",
-"tidy_blob = \" \".join(lines)\r\n",
-"lines"
-],
-"outputs": [
-{
-"output_type": "execute_result",
-"data": {
-"text/plain": [
-"['meta =', 'nice', ',', 'graph =', ',', 'cache =', ',']"
-]
-},
-"metadata": {},
-"execution_count": 25
-}
-],
+"for i, line in enumerate(lines):\r\n",
+" lines[i] = line.strip()\r\n",
+"\r\n",
+"tidy_blob = \" \".join(lines)"
+],
+"outputs": [],
 "metadata": {}
 },
 {
 "cell_type": "code",
-"execution_count": 26,
+"execution_count": 87,
 "source": [
 "data = []\r\n",
 "\r\n",
-"# get objects\r\n",
+"# get blocks\r\n",
 "for line in lines:\r\n",
 " object = {}\r\n",
 " if \"=\" in line:\r\n",
@@ -47,19 +37,72 @@
 " if len(keytype) > 1:\r\n",
 " type = keytype[1]\r\n",
 " object[\"type\"] = type\r\n",
-" data.append(object)\r\n",
-"\r\n",
-"data"
-],
-"outputs": [
-{
-"output_type": "stream",
-"name": "stdout",
-"text": [
-"{'key': 'meta'}\n",
-"{'key': 'graph'}\n",
-"{'key': 'cache'}\n"
-]
-}
-],
+" data.append(object)"
+],
+"outputs": [],
+"metadata": {}
+},
+{
+"cell_type": "code",
+"execution_count": 88,
+"source": [
+"words = tidy_blob.split(\" \")"
+],
+"outputs": [],
+"metadata": {}
+},
+{
+"cell_type": "code",
+"execution_count": 89,
+"source": [
+"def extract_objects(keywords):\r\n",
+" blocks = []\r\n",
+" depth = -1\r\n",
+" i = -1\r\n",
+" last = \"\"\r\n",
+" for word in keywords:\r\n",
+" #print(f\"Word: {word} | Depth: {depth} | i: {i}\")\r\n",
+" if word == \"{\":\r\n",
+" depth += 1\r\n",
+" if depth == 1:\r\n",
+" #print(\"New block of depth 1\")\r\n",
+" blocks.append({\"key\": last, \"object\": []})\r\n",
+" i += 1\r\n",
+" elif word == \"}\":\r\n",
+" depth -= 1\r\n",
+" else:\r\n",
+" last = word\r\n",
+" if depth >= 1:\r\n",
+" blocks[i][\"object\"].append(word)\r\n",
+" return blocks\r\n",
+"\r\n",
+"data = extract_objects(words)\r\n",
+"temp = []\r\n",
+"for blob in data:\r\n",
+" temp.append(extract_objects(blob[\"object\"]))\r\n",
+"\r\n",
+" \r\n",
+"\r\n",
+"temp"
+],
+"outputs": [
+{
+"output_type": "execute_result",
+"data": {
+"text/plain": [
+"[[{'key': 'aa', 'object': ['{', 'aaa']},\n",
+" {'key': 'ab', 'object': ['{', 'aba']},\n",
+" {'key': 'ac', 'object': ['{', 'abb']}],\n",
+" [{'key': 'ba', 'object': ['{']},\n",
+" {'key': 'bb', 'object': ['{']},\n",
+" {'key': 'bc', 'object': ['{']}],\n",
+" [{'key': 'ba', 'object': ['{']},\n",
+" {'key': 'bb', 'object': ['{']},\n",
+" {'key': 'bc', 'object': ['{']}]]"
+]
+},
+"metadata": {},
+"execution_count": 89
+}
+],
 "metadata": {}

NeutrinoTestScene.json (new file, 103 lines added)

@@ -0,0 +1,103 @@
{
"graph": {
"scene": {
"meta": {
"name": "Neutrino Test Scene"
},
"objects": {
"0x4": {
"name": {
"t": "name",
"v": "EY59QAA0"
},
"asset": "#0x9",
"trans": "#0xa"
},
"0x5": {
"name": {
"t": "name",
"v": "DK7AGBDR"
},
"asset": {
"t": "asset",
"v": "*0x1"
},
"trans": "#0xa"
},
"0x6": {
"name": {
"t": "name",
"v": "TJFARWI7"
},
"asset": {
"t": "asset",
"v": "*0x2"
},
"trans": "#0xa"
},
"0x7": {
"name": {
"t": "name",
"v": "NPW1NN32"
},
"asset": "#0x9",
"trans": "#0xa"
},
"0x8": {
"name": {
"t": "name",
"v": "ZUYHYVNZ"
},
"asset": "#0x9",
"trans": "#0xa"
}
}
},
"assets": {
"0x1": {
"name": {
"t": "name",
"v": "ZDENIALL"
}
},
"0x2": {
"name": {
"t": "name",
"v": "CLO2S21B"
}
},
"0x3": {
"name": {
"t": "name",
"v": "XEG2P4VV"
}
}
}
},
"cache": {
"#0x9": {
"t": "asset",
"v": "*0x3"
},
"#0xa": {
"t": "trans",
"v": [
[
0.0,
0.0,
0.0
],
[
0.0,
0.0,
0.0
],
[
1.0,
1.0,
1.0
]
]
}
}
}
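A small companion sketch (illustrative, not part of the commit): in the data above, "*"-prefixed values point into graph.assets, so each object's asset name can be recovered by stripping the "*", assuming the "#" cache pointers have already been expanded as in the load_scene sketch earlier.

# illustrative: map each scene object key to the name of the asset it references
def object_asset_names(data):
    assets = data["graph"]["assets"]
    names = {}
    for obj_key, obj in data["graph"]["scene"]["objects"].items():
        ref = obj["asset"]["v"]  # e.g. "*0x1"
        if isinstance(ref, str) and ref.startswith("*"):
            # strip the "*" to get the asset's key in the graph
            names[obj_key] = assets[ref[1:]]["name"]["v"]
    return names

# e.g. object_asset_names(load_scene("NeutrinoTestScene.json"))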

View File

@@ -1,18 +1,19 @@
 // this is a comment
-meta =
-scale:float = 1.0,
-,
-graph =
-&0 =
-name:string = "SM_LargeWindow_A",
-mesh:mesh = *2,
-transform:trans = #b,
-,
-&1 =
-name:string = "SM_LargeWindow_A2",
-mesh:mesh = *2,
-transform:trans = #b,
-,
-cache =
+meta = {
+scale:float = 1.0
+}
+graph = {
+&0 = {
+name:string = "SM_LargeWindow_A"
+mesh:mesh = *2
+transform:trans = #b
+}
+&1 = {
+name:string = "SM_LargeWindow_A2"
+mesh:mesh = *2
+transform:trans = #b
+}
+}
+cache = {
 //
-,
+}

testB.neu (new file, 35 lines added)

@@ -0,0 +1,35 @@
{
a {
aa {
aaa
}
ab {
aba
}
ac {
abb
}
}
b {
ba {
//
}
bb {
//
}
bc {
//
}
}
c {
ba {
//
}
bb {
//
}
bc {
//
}
}
}