{ "cells": [ { "cell_type": "code", "execution_count": 109, "source": [ "# load the test file, closing the handle when done\r\n", "with open(\"Test.neu\") as infile:\r\n", "    data = infile.read()\r\n", "\r\n", "# strip whitespace from each line and flatten the file into a single string\r\n", "data = \"\".join(line.strip() for line in data.split(\"\\n\"))\r\n", "\r\n", "data" ], "outputs": [ { "output_type": "execute_result", "data": { "text/plain": [ "'@{string,float,int}#{2:\"100\"}*{&0,0:\"Hello, world! :@#*\",1:\"512.0\",&0}'" ] }, "metadata": {}, "execution_count": 109 } ], "metadata": {} }, { "cell_type": "code", "execution_count": 110, "source": [ "# the three sections of a .neu file: type names, cached entries, and the object graph\r\n", "cache = {\r\n", "    \"names\": [],\r\n", "    \"cache\": [],\r\n", "    \"objects\": []\r\n", "}\r\n", "\r\n", "# parse a string entry according to its type and return a dict\r\n", "def parse_entry(entry_type, entry_value):\r\n", "    # convert values to proper types as needed\r\n", "    if entry_type == \"float\":\r\n", "        converted_value = float(entry_value)\r\n", "    elif entry_type == \"int\":\r\n", "        converted_value = int(entry_value)\r\n", "    else:\r\n", "        converted_value = entry_value\r\n", "\r\n", "    # return a dictionary of the parsed entry\r\n", "    return {\"type\": entry_type, \"value\": converted_value}\r\n", "\r\n", "# iterate through each character in the raw data and build a cache of parsed entries\r\n", "stage = \"\"\r\n", "entry_type = \"\"\r\n", "entry_value = \"\"\r\n", "consume = False\r\n", "is_literal = False\r\n", "for char in data:\r\n", "    if char == '\"':\r\n", "        # enter/exit literal consumption (special characters inside values are kept verbatim)\r\n", "        is_literal = not is_literal\r\n", "        continue\r\n", "    # only treat special characters as syntax outside of a literal value\r\n", "    if not is_literal:\r\n", "        if char == \"@\":\r\n", "            stage = \"names\"\r\n", "            continue\r\n", "        elif char == \"*\":\r\n", "            stage = \"objects\"\r\n", "            continue\r\n", "        elif char == \"#\":\r\n", "            stage = \"cache\"\r\n", "            continue\r\n", "        elif char == \"{\":\r\n", "            # start consuming at the start of each stage\r\n", "            consume = True\r\n", "            continue\r\n", "        elif char == \"}\":\r\n", "            # commit the final entry of the stage\r\n", "            if stage != \"names\":\r\n", "                cache[stage].append(parse_entry(entry_type, entry_value))\r\n", "            else:\r\n", "                cache[stage].append(entry_value)\r\n", "            entry_value = \"\"\r\n", "\r\n", "            # stop consuming at the end of each stage\r\n", "            consume = False\r\n", "            continue\r\n", "        elif char == \",\":\r\n", "            # commit the entry\r\n", "            if stage != \"names\":\r\n", "                cache[stage].append(parse_entry(entry_type, entry_value))\r\n", "            else:\r\n", "                cache[stage].append(entry_value)\r\n", "            entry_value = \"\"\r\n", "            continue\r\n", "        elif char == \":\":\r\n", "            # the text before \":\" is a hexadecimal index into the name table\r\n", "            entry_type = cache[\"names\"][int(entry_value, base = 16)]\r\n", "            entry_value = \"\"\r\n", "            continue\r\n", "\r\n", "    if consume:\r\n", "        # handle cache references: \"&\" followed by a single hexadecimal cache index\r\n", "        if entry_value == \"&\":\r\n", "            cached_object = cache[\"cache\"][int(char, base = 16)]\r\n", "            entry_type = cached_object[\"type\"]\r\n", "            entry_value = cached_object[\"value\"]\r\n", "            continue\r\n", "        else:\r\n", "            entry_value += char" ], "outputs": [], "metadata": {} }, { "cell_type": "code", "execution_count": 111, "source": [ "# the object graph is the list of parsed top-level entries\r\n", "graph = cache[\"objects\"]" ], "outputs": [], "metadata": {} }, { "cell_type": "code", "execution_count": 112, "source": [ "# DEBUG: dump the parsed graph for inspection\r\n", "import json\r\n", "with open(\"NeutrinoTest_Cache.json\", \"w\") as outfile:\r\n", "    json.dump(graph, outfile, indent = 4)" ], "outputs": [], "metadata": {} } ], "metadata": { "orig_nbformat": 4, "language_info": { "name": "python", "version": "3.7.8", "mimetype": "text/x-python", "codemirror_mode": { "name": "ipython", "version": 3 }, "pygments_lexer": "ipython3", "nbconvert_exporter": "python", "file_extension": ".py" }, "kernelspec": { "name": "python3", "display_name": "Python 3.7.8 64-bit" }, "interpreter": { "hash": "57baa5815c940fdaff4d14510622de9616cae602444507ba5d0b6727c008cbd6" } }, "nbformat": 4, "nbformat_minor": 2 }