Neutrino/NeutrinoJSONTest.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": 7,
"source": [
"import json\r\n",
"\r\n",
"# create a new scene graph\r\n",
"def new_scene(name):\r\n",
" # create empty neutrino data\r\n",
" data = {\r\n",
" \"meta\": {\r\n",
" \"name\": (\"name\", name),\r\n",
" \"scale\": (\"float\", 1.0),\r\n",
" \"asset_path\": (\"path\", \"./\"),\r\n",
" },\r\n",
" \"graph\": {\r\n",
" \"scene\": {},\r\n",
" \"assets\": {}\r\n",
" },\r\n",
" \"internal\": {\r\n",
" \"max_object_key\": 0,\r\n",
" \"max_cache_key\": 0\r\n",
" }\r\n",
" }\r\n",
"\r\n",
" # return that empty data\r\n",
" return data\r\n",
"\r\n",
"# write the data to a JSON file\r\n",
"def save_scene(data):\r\n",
" # create working copy of the scene data\r\n",
" clean_data = data.copy()\r\n",
"\r\n",
" # get rid of internal data (not to be exported)\r\n",
" del clean_data[\"internal\"]\r\n",
" \r\n",
" filename = data[\"meta\"][\"name\"][1].replace(\" \", \"\") + \".json\"\r\n",
" with open(filename, \"w\") as outfile:\r\n",
" json.dump(clean_data, outfile, indent = 4)\r\n",
"\r\n",
"# get a new indexed object key and track it\r\n",
"def object_key(data):\r\n",
" # get the indexed key\r\n",
" key = hex(data[\"internal\"][\"max_object_key\"] + 1)\r\n",
"\r\n",
" # index the max key\r\n",
" data[\"internal\"][\"max_object_key\"] += 1\r\n",
"\r\n",
" return key\r\n",
"\r\n",
"# get a new indexed cache key and track it\r\n",
"def cache_key(data):\r\n",
" # get the indexed key\r\n",
" key = hex(data[\"internal\"][\"max_cache_key\"] + 1)\r\n",
"\r\n",
" # index the max key\r\n",
" data[\"internal\"][\"max_cache_key\"] += 1\r\n",
"\r\n",
" return key\r\n",
"\r\n",
"# add an asset to the graph\r\n",
"def add_asset(data, name, path):\r\n",
" asset_data = {\r\n",
" \"name\": (\"name\", name),\r\n",
" \"file\": (\"path\", path)\r\n",
" }\r\n",
" \r\n",
" # add the asset to the graph\r\n",
" data[\"graph\"][\"assets\"][object_key(data)] = (\"asset\", asset_data)\r\n",
"\r\n",
"# add an object to the scene\r\n",
"def spawn_object(data, name, asset):\r\n",
" object_data = {\r\n",
" \"name\": (\"name\", name),\r\n",
" \"asset\": \"\",\r\n",
" \"trans\": (\"trans\", [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]])\r\n",
" }\r\n",
"\r\n",
" # get an asset key by the provided name\r\n",
" for key, value in data[\"graph\"][\"assets\"].items():\r\n",
" if value[1][\"name\"][1] == asset:\r\n",
" object_data[\"asset\"] = f\"*{key}\"\r\n",
"\r\n",
" # add the object to the scene\r\n",
" data[\"graph\"][\"scene\"][object_key(data)] = (\"object\", object_data)"
],
"outputs": [],
"metadata": {}
},
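{
"cell_type": "code",
"execution_count": null,
"source": [
"# Hedged sketch, not part of the original notebook: a small helper that follows the\r\n",
"# \"*\"-prefixed asset pointer written by spawn_object back to the asset entry it names.\r\n",
"# The helper name resolve_asset is an assumption made for illustration only.\r\n",
"def resolve_asset(data, object_data):\r\n",
" pointer = object_data[\"asset\"]\r\n",
"\r\n",
" # spawn_object stores asset references as strings like \"*0x1\"\r\n",
" if not (isinstance(pointer, str) and pointer.startswith(\"*\")):\r\n",
" return None\r\n",
"\r\n",
" # strip the \"*\" marker and look the key up in the asset container\r\n",
" return data[\"graph\"][\"assets\"].get(pointer[1:])"
],
"outputs": [],
"metadata": {}
},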
{
"cell_type": "code",
"execution_count": 8,
"source": [
"# cache the scene\r\n",
"def cache_scene(data):\r\n",
" # add the cache object to the scene data\r\n",
" data[\"cache\"] = {}\r\n",
"\r\n",
" containers = [\r\n",
" data[\"graph\"][\"scene\"],\r\n",
" data[\"graph\"][\"assets\"]\r\n",
" ]\r\n",
"\r\n",
" for objects in containers:\r\n",
" # temp cache\r\n",
" hash_cache = {}\r\n",
"\r\n",
" # hash all values\r\n",
" for key, value in objects.items():\r\n",
" for key, value in value[1].items():\r\n",
" # ignore pointers (the only non-tuple object)\r\n",
" if type(value) == tuple:\r\n",
" # convert into string and hash that\r\n",
" value_hash = hash(str(value))\r\n",
"\r\n",
" # track in temp cache\r\n",
" if value_hash not in hash_cache:\r\n",
" hash_cache[value_hash] = {\"value\": value, \"count\": 1}\r\n",
" else:\r\n",
" hash_cache[value_hash][\"count\"] += 1\r\n",
"\r\n",
" # throw out all non-repeated values\r\n",
" bad_keys = []\r\n",
" for key, value in hash_cache.items():\r\n",
" if value[\"count\"] < 2:\r\n",
" bad_keys.append(key)\r\n",
" for key in bad_keys:\r\n",
" del hash_cache[key]\r\n",
"\r\n",
" # create hash objects for each repeated value\r\n",
" for key, value in hash_cache.items():\r\n",
" cache_pointer = cache_key(data)\r\n",
" data[\"cache\"][cache_pointer] = value[\"value\"]\r\n",
" hash_cache[key][\"pointer\"] = cache_pointer\r\n",
"\r\n",
" # replace all instances of cached values in the graph with corresponding cache pointers\r\n",
" for object_key, object_value in objects.items():\r\n",
" for value_key, value_value in object_value[1].items():\r\n",
" # ignore pointers (the only non-tuple object)\r\n",
" if type(value_value) == tuple:\r\n",
" # convert into string and hash that\r\n",
" value_hash = hash(str(value_value))\r\n",
"\r\n",
" # if this value is cached, replace it with its cache pointer\r\n",
" if value_hash in hash_cache:\r\n",
" objects[object_key][1][value_key] = \"#\" + hash_cache[value_hash][\"pointer\"]"
],
"outputs": [],
"metadata": {}
},
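{
"cell_type": "code",
"execution_count": null,
"source": [
"# Hedged sketch, not part of the original notebook: the rough inverse of cache_scene,\r\n",
"# shown to illustrate the \"#\" cache-pointer convention. It walks the same two containers\r\n",
"# and swaps every \"#\"-prefixed pointer back to the value stored under data[\"cache\"].\r\n",
"# The function name expand_scene is an assumption made for illustration only.\r\n",
"def expand_scene(data):\r\n",
" containers = [\r\n",
" data[\"graph\"][\"scene\"],\r\n",
" data[\"graph\"][\"assets\"]\r\n",
" ]\r\n",
"\r\n",
" for objects in containers:\r\n",
" for obj_key, obj_value in objects.items():\r\n",
" for field_key, field_value in obj_value[1].items():\r\n",
" # cache pointers are strings of the form \"#0x1\"\r\n",
" if isinstance(field_value, str) and field_value.startswith(\"#\"):\r\n",
" obj_value[1][field_key] = data[\"cache\"][field_value[1:]]"
],
"outputs": [],
"metadata": {}
},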
{
"cell_type": "code",
"execution_count": 9,
"source": [
"# just returns a random string\r\n",
"import random\r\n",
"import string\r\n",
"def random_string(length):\r\n",
" return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(length))\r\n",
"\r\n",
"# create test scene\r\n",
"test_scene = new_scene(\"Neutrino Test Scene\")\r\n",
"\r\n",
"# populate assets\r\n",
"asset_names = []\r\n",
"for i in range(3):\r\n",
" name = random_string(8)\r\n",
" add_asset(test_scene, name, \"Assets/TestAsset.obj\")\r\n",
" asset_names.append(name)\r\n",
"\r\n",
"# populate objects in scene\r\n",
"for i in range(5):\r\n",
" spawn_object(test_scene, random_string(8), random.choice(asset_names))\r\n",
"\r\n",
"# cache the scene\r\n",
"cache_scene(test_scene)\r\n",
"\r\n",
"save_scene(test_scene)"
],
"outputs": [],
"metadata": {}
},
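{
"cell_type": "code",
"execution_count": null,
"source": [
"# Hedged sketch, not part of the original notebook: read the exported file back in to\r\n",
"# check the round trip, assuming the test cell above has already run. The filename mirrors\r\n",
"# what save_scene builds from the scene name (\"Neutrino Test Scene\" -> \"NeutrinoTestScene.json\");\r\n",
"# json.load returns lists where the tuples were, which is expected for this format.\r\n",
"with open(\"NeutrinoTestScene.json\", \"r\") as infile:\r\n",
" loaded = json.load(infile)\r\n",
"\r\n",
"print(len(loaded[\"graph\"][\"scene\"]), \"objects,\",\r\n",
" len(loaded[\"graph\"][\"assets\"]), \"assets,\",\r\n",
" len(loaded[\"cache\"]), \"cached values\")"
],
"outputs": [],
"metadata": {}
}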
],
"metadata": {
"orig_nbformat": 4,
"language_info": {
"name": "python",
"version": "3.7.8",
"mimetype": "text/x-python",
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"pygments_lexer": "ipython3",
"nbconvert_exporter": "python",
"file_extension": ".py"
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3.7.8 64-bit"
},
"interpreter": {
"hash": "57baa5815c940fdaff4d14510622de9616cae602444507ba5d0b6727c008cbd6"
}
},
"nbformat": 4,
"nbformat_minor": 2
}