|
@@ -23,16 +23,26 @@ def md5digest(data):
|
|
|
hasher.update(data)
|
|
|
return hasher.hexdigest()
|
|
|
|
|
|
# In-process memoization of disk-cache hits, keyed by MD5 digest of the input.
# NOTE(review): unbounded -- grows for the life of the process; acceptable for
# a compiler-style tool, but worth a cap if inputs are numerous.
fetch_caches = {}


def fetch_cached(data, mode=None):
    """Return the cached, unpickled result for *data*, or None on a miss.

    The cache key is ``md5digest(data)``.  Lookup order:
      1. the in-process ``fetch_caches`` dict (fast path),
      2. a pickle file in the ``../caches/`` folder next to this module,
         named ``<md5>.pickle`` or ``<mode>_<md5>.pickle`` when *mode* is set.

    A successful disk hit is promoted into ``fetch_caches`` so subsequent
    lookups in the same process skip the filesystem.  Any failure (missing
    cache file, unpickling error, ...) is treated as a cache miss and returns
    None -- callers are expected to recompute and re-store.
    """
    try:
        md5 = md5digest(data)

        # Fast path: result already loaded during this process run.
        if md5 in fetch_caches:
            return fetch_caches[md5]

        cache_folder = os.path.abspath(
            os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "caches")
        )
        if mode is None:
            picklefile = os.path.join(cache_folder, "%s.pickle" % md5)
        else:
            picklefile = os.path.join(cache_folder, "%s_%s.pickle" % (mode, md5))

        # SECURITY NOTE: pickle.load executes arbitrary code on load; this is
        # only safe while the caches/ directory is writable solely by trusted
        # users.  Flagged for review rather than replaced.
        with open(picklefile, "rb") as f:
            result = pickle.load(f)

        fetch_caches[md5] = result
        return result
    except Exception:
        # Best-effort cache: any error is a miss.  Narrowed from a bare
        # `except:` so KeyboardInterrupt/SystemExit are no longer swallowed.
        return None
|
|
|
|
|
@@ -151,7 +161,10 @@ def do_compile(inputfile, grammarfile, visitors=[], include_paths = [], mode="")
|
|
|
else:
|
|
|
# The outer for finally finished, so there were no includes remaining, thus terminate the infinite while loop
|
|
|
break
|
|
|
- tree_data = pickle.dumps(result["tree"], pickle.HIGHEST_PROTOCOL)
|
|
|
+
|
|
|
+ pruned = result["tree"].prune()
|
|
|
+ import json
|
|
|
+ tree_data = json.dumps(pruned)
|
|
|
new_result = fetch_cached(tree_data, mode)
|
|
|
if new_result is None:
|
|
|
result["tree"].fix_tracability(inputfile)
|