{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 217,
   "source": [
    "import json\r\n",
    "\r\n",
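    "# values in a neutrino scene are stored as (type, value) tuples ('typevals'),\r\n",
    "# e.g. (\"float\", 1.0), so the exported JSON keeps type information alongside each value\r\n",
    "\r\n",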
    "# create a new scene graph\r\n",
    "def new_scene(name):\r\n",
    "    # create empty neutrino data\r\n",
    "    data = {\r\n",
    "        \"meta\": {\r\n",
    "            \"name\": (\"name\", name),\r\n",
    "            \"scale\": (\"float\", 1.0),\r\n",
    "            \"asset_path\": (\"path\", \"./\"),\r\n",
    "        },\r\n",
    "        \"graph\": {\r\n",
    "            \"scene\": {},\r\n",
    "            \"assets\": {}\r\n",
    "        },\r\n",
    "        \"internal\": {\r\n",
    "            \"max_object_key\": {\"index\": 0},\r\n",
    "            \"max_cache_key\": {\"index\": 0}\r\n",
    "        }\r\n",
    "    }\r\n",
    "\r\n",
    "    # return that empty data\r\n",
    "    return data\r\n",
    "\r\n",
    "# write the data to a JSON file\r\n",
    "def save_scene(data, readable):\r\n",
    "    # create a working copy of the scene data\r\n",
    "    clean_data = data.copy()\r\n",
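    "    # (a shallow copy is enough here; only the top-level \"internal\" key is dropped)\r\n",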
2021-08-23 16:08:23 -06:00
|
|
|
"\r\n",
|
2021-08-23 17:16:23 -06:00
|
|
|
" # get rid of internal data (not to be exported)\r\n",
|
|
|
|
" del clean_data[\"internal\"]\r\n",
|
|
|
|
" \r\n",
|
|
|
|
" filename = data[\"meta\"][\"name\"][1].replace(\" \", \"\") + \".json\"\r\n",
|
2021-08-23 16:08:23 -06:00
|
|
|
" with open(filename, \"w\") as outfile:\r\n",
|
2021-08-24 01:29:35 -06:00
|
|
|
" if readable:\r\n",
|
|
|
|
" json.dump(clean_data, outfile, indent = 4)\r\n",
|
|
|
|
" else:\r\n",
|
|
|
|
" json.dump(clean_data, outfile)\r\n",
|
2021-08-23 16:08:23 -06:00
|
|
|
"\r\n",
|
2021-08-23 17:42:26 -06:00
|
|
|
"# get a new indexed object key and track it\r\n",
|
2021-08-24 01:20:08 -06:00
|
|
|
"def new_key(index):\r\n",
|
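    "    # 'index' is a mutable counter dict like {\"index\": 0}, so the caller's\r\n",
    "    # running key count is advanced in place\r\n",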
    "    # get the indexed key\r\n",
    "    key = hex(index[\"index\"] + 1)\r\n",
    "\r\n",
    "    # advance the stored max key\r\n",
    "    index[\"index\"] += 1\r\n",
    "\r\n",
    "    return key\r\n",
    "\r\n",
    "# add an asset to the graph\r\n",
    "def add_asset(data, name, path):\r\n",
    "    asset_data = {\r\n",
    "        \"name\": (\"name\", name),\r\n",
    "        \"file\": (\"path\", path)\r\n",
    "    }\r\n",
    "\r\n",
    "    # add the asset to the graph\r\n",
    "    data[\"graph\"][\"assets\"][new_key(data[\"internal\"][\"max_object_key\"])] = (\"asset\", asset_data)\r\n",
    "\r\n",
    "# add an object to the scene\r\n",
    "def spawn_object(data, name, asset):\r\n",
    "    object_data = {\r\n",
    "        \"name\": (\"name\", name),\r\n",
    "        \"asset\": \"\",\r\n",
    "        \"trans\": (\"trans\", {\r\n",
    "            \"position\": (\"vec3\", [0.0, 0.0, 0.0]),\r\n",
    "            \"rotation\": (\"vec3\", [0.0, 0.0, 0.0]),\r\n",
    "            \"scale\": (\"vec3\", [1.0, 1.0, 1.0])\r\n",
    "        })\r\n",
    "    }\r\n",
    "\r\n",
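    "    # asset references are stored as \"*<key>\" pointer strings into graph[\"assets\"]\r\n",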
    "    # get an asset key by the provided name\r\n",
    "    for key, value in data[\"graph\"][\"assets\"].items():\r\n",
    "        if value[1][\"name\"][1] == asset:\r\n",
    "            object_data[\"asset\"] = f\"*{key}\"\r\n",
    "\r\n",
    "    # add the object to the scene\r\n",
    "    data[\"graph\"][\"scene\"][new_key(data[\"internal\"][\"max_object_key\"])] = (\"object\", object_data)"
   ],
   "outputs": [],
   "metadata": {}
  },
  {
   "cell_type": "markdown",
   "source": [
    "### Implement SPORC for storage/memory optimization\r\n",
    "(Single-Pointer Objective Cache)\r\n",
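    "\r\n",
    "The cell below hashes every repeated `(type, value)` tuple in the graph, stores it once in a top-level `cache` object, and replaces each occurrence with a `\"#<key>\"` pointer; values that occur only once are unwound back to their original form, so the cache only holds genuinely shared data."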
   ],
   "metadata": {}
  },
  {
   "cell_type": "code",
   "execution_count": 218,
   "source": [
    "# recursively cache a single typeval tuple object\r\n",
    "def cache_typeval(cache, typeval):\r\n",
    "    # ignore anything that isn't a typeval tuple\r\n",
    "    if type(typeval) == tuple:\r\n",
    "        for key, value in typeval[1].items():\r\n",
    "            # refuse to cache pointers (that's just... that would just be a nightmare)\r\n",
    "            if type(value) == str:\r\n",
    "                is_pointer = (\"*\" in value)\r\n",
    "            else:\r\n",
    "                is_pointer = False\r\n",
    "            if not is_pointer:\r\n",
    "                # cache member objects first if this value holds a dictionary\r\n",
    "                if type(value[1]) == dict:\r\n",
    "                    cache_typeval(cache, value)\r\n",
    "\r\n",
    "                value_hash = hash(str(value))\r\n",
    "\r\n",
    "                # track in cache\r\n",
    "                if value_hash not in cache[\"objects\"]:\r\n",
    "                    cache_pointer = new_key(cache[\"key_index\"])\r\n",
    "                    cache[\"objects\"][value_hash] = {\"key\": cache_pointer, \"value\": value, \"count\": 1}\r\n",
    "                else:\r\n",
    "                    cache_pointer = cache[\"objects\"][value_hash][\"key\"]\r\n",
    "                    cache[\"objects\"][value_hash][\"count\"] += 1\r\n",
    "\r\n",
    "                # replace the real value with a \"#\" cache pointer\r\n",
    "                typeval[1][key] = \"#\" + cache_pointer\r\n",
    "\r\n",
    "# if a value only occurs once, restore the original and drop the cached copy\r\n",
    "def uncache_typeval(cache, typeval):\r\n",
    "    for key, value in typeval[1].items():\r\n",
    "        # leave asset pointers alone here as well\r\n",
    "        if type(value) == str:\r\n",
    "            is_pointer = (\"*\" in value)\r\n",
    "        else:\r\n",
    "            is_pointer = False\r\n",
    "        if not is_pointer:\r\n",
    "            # uncache member objects first if this value holds a dictionary\r\n",
    "            if type(value[1]) == dict:\r\n",
    "                uncache_typeval(cache, value)\r\n",
    "\r\n",
    "            # check whether the cached value occurs only once\r\n",
    "            cache_key = value.replace(\"#\", \"\")\r\n",
    "            if cache[cache_key][\"count\"] <= 1:\r\n",
    "                # replace the cache pointer in the scene data with its original value\r\n",
    "                typeval[1][key] = cache[cache_key][\"value\"]\r\n",
    "\r\n",
    "                # delete this object from the cache\r\n",
    "                del cache[cache_key]\r\n",
    "\r\n",
    "# cache the scene\r\n",
    "def cache_scene(data):\r\n",
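    "    # three passes: hash every typeval into a hash-keyed cache, re-key that cache\r\n",
    "    # by pointer, then unwind entries that are only referenced once\r\n",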
    "    # add the cache object to the scene data\r\n",
    "    data[\"cache\"] = {}\r\n",
    "\r\n",
    "    containers = [\r\n",
    "        data[\"graph\"][\"scene\"],\r\n",
    "        data[\"graph\"][\"assets\"]\r\n",
    "    ]\r\n",
    "\r\n",
    "    # build a cache of value hashes and pointers\r\n",
    "    hash_cache = {\"key_index\": {\"index\": 0}, \"objects\": {}}\r\n",
    "    for objects in containers:\r\n",
    "        for key, value in objects.items():\r\n",
    "            cache_typeval(hash_cache, value)\r\n",
    "\r\n",
    "    # create a cache keyed by pointer instead of by value hash\r\n",
    "    key_cache = {}\r\n",
    "    for key, value in hash_cache[\"objects\"].items():\r\n",
    "        key_cache[value[\"key\"]] = {\"value\": value[\"value\"], \"count\": value[\"count\"]}\r\n",
    "\r\n",
    "    # prune the cache so it only redirects repeated values\r\n",
    "    for objects in containers:\r\n",
    "        for key, value in objects.items():\r\n",
    "            uncache_typeval(key_cache, value)\r\n",
    "\r\n",
    "    # create a serialized cache usable by neutrino\r\n",
    "    serial_cache = {}\r\n",
    "    for key, value in key_cache.items():\r\n",
    "        serial_cache[key] = value[\"value\"]\r\n",
    "\r\n",
    "    # add that cache to the neutrino scene data\r\n",
    "    data[\"cache\"] = serial_cache"
   ],
   "outputs": [],
   "metadata": {}
  },
  {
   "cell_type": "code",
   "execution_count": 219,
   "source": [
    "import random\r\n",
    "import string\r\n",
    "\r\n",
    "# just returns a random string\r\n",
    "def random_string(length):\r\n",
    "    return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(length))\r\n",
    "\r\n",
    "# create test scene\r\n",
    "test_scene = new_scene(\"Neutrino Test Scene\")\r\n",
    "\r\n",
    "# populate assets\r\n",
    "asset_names = []\r\n",
    "for i in range(10):\r\n",
    "    name = random_string(8)\r\n",
    "    add_asset(test_scene, name, \"Assets/TestAsset.obj\")\r\n",
    "    asset_names.append(name)\r\n",
    "\r\n",
    "# populate objects in scene\r\n",
    "for i in range(50):\r\n",
    "    spawn_object(test_scene, random_string(8), random.choice(asset_names))\r\n",
    "\r\n",
    "cache_scene(test_scene)\r\n",
    "save_scene(test_scene, False)"
   ],
   "outputs": [],
   "metadata": {}
  },
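  {
   "cell_type": "code",
   "execution_count": null,
   "source": [
    "# A minimal sketch (not part of the original notebook): how a consumer of the saved\r\n",
    "# JSON could resolve the \"#<key>\" cache pointers and \"*<key>\" asset pointers back\r\n",
    "# into plain values. Assumes the NeutrinoTestScene.json file written by the cell above.\r\n",
    "def resolve(scene, value):\r\n",
    "    # follow cache pointers (\"#\") and asset pointers (\"*\"), recursing into containers\r\n",
    "    if isinstance(value, str) and value.startswith(\"#\"):\r\n",
    "        return resolve(scene, scene[\"cache\"][value[1:]])\r\n",
    "    if isinstance(value, str) and value.startswith(\"*\"):\r\n",
    "        return resolve(scene, scene[\"graph\"][\"assets\"][value[1:]])\r\n",
    "    if isinstance(value, list):\r\n",
    "        return [resolve(scene, item) for item in value]\r\n",
    "    if isinstance(value, dict):\r\n",
    "        return {k: resolve(scene, v) for k, v in value.items()}\r\n",
    "    return value\r\n",
    "\r\n",
    "with open(\"NeutrinoTestScene.json\") as infile:\r\n",
    "    saved = json.load(infile)\r\n",
    "\r\n",
    "# sanity check: fully resolve one object's transform\r\n",
    "first_object = next(iter(saved[\"graph\"][\"scene\"].values()))\r\n",
    "print(resolve(saved, first_object[1][\"trans\"]))"
   ],
   "outputs": [],
   "metadata": {}
  }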
 ],
 "metadata": {
  "orig_nbformat": 4,
  "language_info": {
   "name": "python",
   "version": "3.7.8",
   "mimetype": "text/x-python",
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "pygments_lexer": "ipython3",
   "nbconvert_exporter": "python",
   "file_extension": ".py"
  },
  "kernelspec": {
   "name": "python3",
   "display_name": "Python 3.7.8 64-bit"
  },
  "interpreter": {
   "hash": "57baa5815c940fdaff4d14510622de9616cae602444507ba5d0b6727c008cbd6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}