caching update

Skye Terran 2021-08-24 00:29:35 -07:00
parent 6249803b0f
commit 82c9a37c16
2 changed files with 16 additions and 212 deletions

@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 157,
"execution_count": 217,
"source": [
"import json\r\n",
"\r\n",
@@ -29,7 +29,7 @@
" return data\r\n",
"\r\n",
"# write the data to a JSON file\r\n",
"def save_scene(data):\r\n",
"def save_scene(data, readable):\r\n",
" # create working copy of the scene data\r\n",
" clean_data = data.copy()\r\n",
"\r\n",
@@ -38,7 +38,10 @@
" \r\n",
" filename = data[\"meta\"][\"name\"][1].replace(\" \", \"\") + \".json\"\r\n",
" with open(filename, \"w\") as outfile:\r\n",
" json.dump(clean_data, outfile, indent = 4)\r\n",
" if readable:\r\n",
" json.dump(clean_data, outfile, indent = 4)\r\n",
" else:\r\n",
" json.dump(clean_data, outfile)\r\n",
"\r\n",
"# get a new indexed object key and track it\r\n",
"def new_key(index):\r\n",
@@ -84,69 +87,16 @@
"metadata": {}
},
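Note: the hunks above add a `readable` flag to `save_scene`: indented JSON when set, compact JSON otherwise. A minimal sketch of the function as it reads after this commit (cache cleanup elided; the `("string", "Test Scene")`-style tuple under `data["meta"]["name"]` is assumed from the filename line):

```python
import json

def save_scene(data, readable):
    # working copy; the notebook cleans cache bookkeeping here (elided)
    clean_data = data.copy()

    # scene name assumed to sit at index 1 of a typeval tuple
    filename = data["meta"]["name"][1].replace(" ", "") + ".json"
    with open(filename, "w") as outfile:
        if readable:
            json.dump(clean_data, outfile, indent=4)  # indented, human-friendly
        else:
            json.dump(clean_data, outfile)            # compact, smaller on disk
```

Compact mode drops all indentation and newlines from the output, which is the storage saving the commit message points at.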
{
"cell_type": "code",
"execution_count": 158,
"cell_type": "markdown",
"source": [
"# cache the scene\r\n",
"def cache_scene(data):\r\n",
" # add the cache object to the scene data\r\n",
" data[\"cache\"] = {}\r\n",
"\r\n",
" containers = [\r\n",
" data[\"graph\"][\"scene\"],\r\n",
" data[\"graph\"][\"assets\"]\r\n",
" ]\r\n",
"\r\n",
" for objects in containers:\r\n",
" # temp cache\r\n",
" hash_cache = {}\r\n",
"\r\n",
" # hash all values\r\n",
" for key, value in objects.items():\r\n",
" for key, value in value[1].items():\r\n",
" # ignore pointers (the only non-tuple object)\r\n",
" if type(value) == tuple:\r\n",
" # convert into string and hash that\r\n",
" value_hash = hash(str(value))\r\n",
"\r\n",
" # track in temp cache\r\n",
" if value_hash not in hash_cache:\r\n",
" hash_cache[value_hash] = {\"value\": value, \"count\": 1}\r\n",
" else:\r\n",
" hash_cache[value_hash][\"count\"] += 1\r\n",
"\r\n",
" # throw out all non-repeated values\r\n",
" bad_keys = []\r\n",
" for key, value in hash_cache.items():\r\n",
" if value[\"count\"] < 2:\r\n",
" bad_keys.append(key)\r\n",
" for key in bad_keys:\r\n",
" del hash_cache[key]\r\n",
"\r\n",
" # create hash objects for each repeated value\r\n",
" for key, value in hash_cache.items():\r\n",
" cache_pointer = cache_key(data)\r\n",
" data[\"cache\"][cache_pointer] = value[\"value\"]\r\n",
" hash_cache[key][\"pointer\"] = cache_pointer\r\n",
"\r\n",
" # replace all instances of cached values in the graph with corresponding cache pointers\r\n",
" for object_key, object_value in objects.items():\r\n",
" for value_key, value_value in object_value[1].items():\r\n",
" # ignore pointers (the only non-tuple object)\r\n",
" if type(value_value) == tuple:\r\n",
" # convert into string and hash that\r\n",
" value_hash = hash(str(value_value))\r\n",
"\r\n",
" # if this value is cached, replace it with its cache pointer\r\n",
" if value_hash in hash_cache:\r\n",
" objects[object_key][1][value_key] = \"#\" + hash_cache[value_hash][\"pointer\"]"
"### Implement SPORC for storage/memory optimization\r\n",
"(Single-Pointer Objective Cache)"
],
"outputs": [],
"metadata": {}
},
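Note: the removed cell above was the first-pass cache: hash each tuple value via `str()`, count repeats, store each repeated value once, and rewrite every occurrence as a `"#"`-prefixed pointer. The markdown cell that replaces it names the successor scheme, SPORC. A hedged reconstruction of the removed dedup-and-point logic, with the key/value shadowing in the original loops cleaned up; the `cache_key` stand-in below is hypothetical (the notebook has its own key generator):

```python
import itertools

_counter = itertools.count(1)

def cache_key(data):
    # hypothetical stand-in for the notebook's own key generator
    return "c" + str(next(_counter))

def cache_scene(data):
    # add the cache object to the scene data
    data["cache"] = {}
    containers = [data["graph"]["scene"], data["graph"]["assets"]]

    for objects in containers:
        # pass 1: hash every tuple value via str() and count repeats;
        # pointers are the only non-tuple values, so they are skipped
        hash_cache = {}
        for obj in objects.values():
            for value in obj[1].values():
                if isinstance(value, tuple):
                    entry = hash_cache.setdefault(
                        hash(str(value)), {"value": value, "count": 0})
                    entry["count"] += 1

        # keep only repeated values; store each once under a fresh key
        pointers = {}
        for value_hash, entry in hash_cache.items():
            if entry["count"] > 1:
                pointer = cache_key(data)
                data["cache"][pointer] = entry["value"]
                pointers[value_hash] = pointer

        # pass 2: swap every occurrence of a cached value for "#<key>"
        for obj in objects.values():
            for name, value in obj[1].items():
                if isinstance(value, tuple):
                    value_hash = hash(str(value))
                    if value_hash in pointers:
                        obj[1][name] = "#" + pointers[value_hash]
```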
{
"cell_type": "code",
"execution_count": 159,
"execution_count": 218,
"source": [
"# recursively cache a single typeval tuple object\r\n",
"def cache_typeval(cache, typeval):\r\n",
@@ -232,22 +182,14 @@
" serial_cache[key] = value[\"value\"]\r\n",
"\r\n",
" # add that cache to the neutrino scene data\r\n",
" data[\"cache\"] = serial_cache\r\n",
"\r\n",
" # DEBUG\r\n",
" with open(\"hash_cache.json\", \"w\") as outfile:\r\n",
" json.dump(hash_cache, outfile, indent = 4)\r\n",
" with open(\"key_cache.json\", \"w\") as outfile:\r\n",
" json.dump(key_cache, outfile, indent = 4)\r\n",
" with open(\"serial_cache.json\", \"w\") as outfile:\r\n",
" json.dump(serial_cache, outfile, indent = 4)"
" data[\"cache\"] = serial_cache"
],
"outputs": [],
"metadata": {}
},
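Note: only the tail of the `cache_typeval` cell changes in this hunk: the three DEBUG dumps (`hash_cache.json`, `key_cache.json`, `serial_cache.json`) are dropped, leaving just the attachment of the flattened cache. A sketch of that flattening step; the shape of `key_cache` (cache key to `{"value": ...}` record) is inferred from the removed debug output and the surviving loop body:

```python
# keep only the raw values for serialization; counts and other
# bookkeeping are build-time only and never reach the JSON file
def build_serial_cache(key_cache):
    serial_cache = {}
    for key, value in key_cache.items():
        serial_cache[key] = value["value"]
    return serial_cache

# attached to the scene exactly as the surviving line does:
# data["cache"] = build_serial_cache(key_cache)
```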
{
"cell_type": "code",
"execution_count": 160,
"execution_count": 219,
"source": [
"# just returns a random string\r\n",
"import random\r\n",
@@ -260,19 +202,17 @@
"\r\n",
"# populate assets\r\n",
"asset_names = []\r\n",
"for i in range(3):\r\n",
"for i in range(10):\r\n",
" name = random_string(8)\r\n",
" add_asset(test_scene, name, \"Assets/TestAsset.obj\")\r\n",
" asset_names.append(name)\r\n",
"\r\n",
"# populate objects in scene\r\n",
"for i in range(5):\r\n",
"for i in range(50):\r\n",
" spawn_object(test_scene, random_string(8), random.choice(asset_names))\r\n",
"\r\n",
"# cache the scene\r\n",
"cache_scene(test_scene)\r\n",
"\r\n",
"save_scene(test_scene)"
"save_scene(test_scene, False)"
],
"outputs": [],
"metadata": {}

File diff suppressed because one or more lines are too long
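Note: the test cell now builds a larger scene (10 assets, 50 objects) and saves it compact. A sketch of the one helper the hunk shows only by its comment, plus the driver as it reads after the commit; `add_asset`, `spawn_object`, `cache_scene`, and `save_scene` are the notebook's own helpers, and the character set in `random_string` is an assumption (its body sits outside this hunk):

```python
import random
import string

# "just returns a random string", per the cell's own comment;
# lowercase ASCII is an assumed character set
def random_string(length):
    return "".join(random.choice(string.ascii_lowercase) for _ in range(length))

# The driver as it reads after the commit (test_scene construction is
# above the hunk):
#
#   asset_names = []
#   for i in range(10):
#       name = random_string(8)
#       add_asset(test_scene, name, "Assets/TestAsset.obj")
#       asset_names.append(name)
#
#   for i in range(50):
#       spawn_object(test_scene, random_string(8), random.choice(asset_names))
#
#   cache_scene(test_scene)
#   save_scene(test_scene, False)   # compact output
```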