
Cache the final result of do_compile as well, much faster bootstrap generation

Yentl Van Tendeloo 8 years ago
parent
commit
268252cef6
1 changed file with 60 additions and 48 deletions
  1. +60 -48 interface/HUTN/hutn_compiler/compiler.py

+ 60 - 48
interface/HUTN/hutn_compiler/compiler.py

@@ -23,20 +23,26 @@ def md5digest(filename):
         hasher.update(afile.read())
     return hasher.hexdigest()
 
-def fetch_cached(filename):
+def fetch_cached(filename, mode=None):
     try:
         md5 = md5digest(filename)
         cache_folder = os.path.abspath("%s/../caches/" % (os.path.dirname(os.path.abspath(__file__))))
-        picklefile = cache_folder + "/%s.pickle" % md5
+        if mode is None:
+            picklefile = cache_folder + "/%s.pickle" % md5
+        else:
+            picklefile = cache_folder + "/%s_%s.pickle" % (mode, md5)
         with open(picklefile, "rb") as f:
             return pickle.load(f)
     except:
         return None
 
-def make_cached(filename, data):
+def make_cached(filename, data, mode=None):
     md5 = md5digest(filename)
     cache_folder = os.path.abspath("%s/../caches/" % (os.path.dirname(os.path.abspath(__file__))))
-    picklefile = cache_folder + "/%s.pickle" % md5
+    if mode is None:
+        picklefile = cache_folder + "/%s.pickle" % md5
+    else:
+        picklefile = cache_folder + "/%s_%s.pickle" % (mode, md5)
     with open(picklefile, "wb") as f:
         pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
 
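For reference, the cache key is still the MD5 digest of the file contents; when a mode is given it is prepended to the pickle name, so different visitor pipelines keep separate cache entries. A minimal standalone sketch of that naming scheme (the helper name and the caches/ location below are illustrative, not part of the patch):

import hashlib
import os

def cache_pickle_path(filename, mode=None, cache_folder="caches"):
    # Key the entry on the MD5 of the file contents, as md5digest() does,
    # and prefix it with the compilation mode when one is given.
    with open(filename, "rb") as f:
        digest = hashlib.md5(f.read()).hexdigest()
    name = digest if mode is None else "%s_%s" % (mode, digest)
    return os.path.join(cache_folder, "%s.pickle" % name)

# Note: do_compile's new default is mode="", not None, so a caller that omits
# the mode still gets a prefixed name of the form "_<md5>.pickle".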
@@ -88,58 +94,64 @@ def find_file(filename, include_paths):
     else:
         raise Exception("Could not resolve file %s. Tried: %s" % (filename, attempts))
 
-def do_compile(inputfile, grammarfile, visitors=[], include_paths = []):
+def do_compile(inputfile, grammarfile, visitors=[], include_paths = [], mode=""):
     import os.path
     global working_file
     working_file = os.path.abspath(inputfile)
 
-    result = do_parse(inputfile, grammarfile)
-    error = result["status"] != Parser.Constants.Success
-    if error:
-        msg = "%s:%s:%s: %s" % (inputfile, result["line"], result["column"], result["text"])
-        raise Exception(msg)
-    else:
-        for child in result["tree"].tail:
-            child.inputfile = inputfile
-        included = set()
-        while True:
-            for i, v in enumerate(result["tree"].tail):
-                if v.head == "include":
-                    # Expand this node
-                    for j in v.tail:
-                        if j.head == "STRVALUE":
-                            f = str(j.tail[0])[1:-1]
-                            if f in included:
-                                subtree = []
-                            else:
-                                name = str(j.tail[0])[1:-1]
-                                subtree = do_parse(find_file(name, include_paths), grammarfile)["tree"].tail
-                                if subtree is None:
-                                    raise Exception("Parsing error for included file %s" % find_file(name, include_paths))
-
-                                for t in subtree:
-                                    t.inputfile = name
-                                included.add(f)
-                            # Found the string value, so break from the inner for ("searching for element")
-                            break
-
-                    # Merge all nodes in
-                    before = result["tree"].tail[:i]
-                    after = result["tree"].tail[i+1:]
-                    result["tree"].tail = before + subtree + after
-                    # Found an include node, but to prevent corruption of the tree, we need to start over again, so break from the outer for loop
+    result = fetch_cached(inputfile, mode)
+    if result is None:
+        result = do_parse(inputfile, grammarfile)
+        error = result["status"] != Parser.Constants.Success
+        if error:
+            msg = "%s:%s:%s: %s" % (inputfile, result["line"], result["column"], result["text"])
+            raise Exception(msg)
+        else:
+            for child in result["tree"].tail:
+                child.inputfile = inputfile
+            included = set()
+            while True:
+                for i, v in enumerate(result["tree"].tail):
+                    if v.head == "include":
+                        # Expand this node
+                        for j in v.tail:
+                            if j.head == "STRVALUE":
+                                f = str(j.tail[0])[1:-1]
+                                if f in included:
+                                    subtree = []
+                                else:
+                                    name = str(j.tail[0])[1:-1]
+                                    subtree = do_parse(find_file(name, include_paths), grammarfile)["tree"].tail
+                                    if subtree is None:
+                                        raise Exception("Parsing error for included file %s" % find_file(name, include_paths))
+
+                                    for t in subtree:
+                                        t.inputfile = name
+                                    included.add(f)
+                                # Found the string value, so break from the inner for ("searching for element")
+                                break
+
+                        # Merge all nodes in
+                        before = result["tree"].tail[:i]
+                        after = result["tree"].tail[i+1:]
+                        result["tree"].tail = before + subtree + after
+                        # Found an include node, but to prevent corruption of the tree, we need to start over again, so break from the outer for loop
+                        break
+                else:
+                    # The outer for finally finished, so there were no includes remaining, thus terminate the infinite while loop
                     break
-            else:
-                # The outer for finally finished, so there were no includes remaining, thus terminate the infinite while loop
-                break
 
-    result["tree"].fix_tracability(inputfile)
+        result["tree"].fix_tracability(inputfile)
+
+        for visitor in visitors:
+            visitor.visit(result["tree"])
 
-    for visitor in visitors:
-        visitor.visit(result["tree"])
+        if visitors:
+            result = visitors[-1].dump()
+            make_cached(inputfile, result, mode)
 
     if visitors:
-        return visitors[-1].dump()
+        return result
 
 def main(input_file, grammar_file, mode, args=[]):
     from prettyprint_visitor import PrettyPrintVisitor
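The new control flow in do_compile is: consult the mode-qualified cache first, and only on a miss run the parser, include expansion, and the visitor passes before pickling the last visitor's dump for the next run. Stripped of the parsing details, the shape of that logic looks like the sketch below, with an in-memory dict standing in for the pickle files; the file name and the "PO" mode are placeholders:

cache = {}

def cached_or_compute(inputfile, mode, compute):
    # Mirrors the new do_compile flow: fetch_cached() first, and only on a
    # miss do the expensive work and make_cached() the final result.
    key = (inputfile, mode)            # stands in for "<mode>_<md5>.pickle"
    result = cache.get(key)
    if result is None:
        result = compute()             # parse, expand includes, run visitors
        cache[key] = result            # persist the last visitor's dump
    return result

first = cached_or_compute("model.mvc", "PO", lambda: "compiled dump")   # miss: computes
second = cached_or_compute("model.mvc", "PO", lambda: "compiled dump")  # hit: served from the cache
assert first == second

One consequence worth noting: on a cache hit the visitors passed to do_compile are never invoked, so the cached value has to be the complete end result of the pipeline, which is exactly why only the last visitor's dump() is stored.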
@@ -161,7 +173,7 @@ def main(input_file, grammar_file, mode, args=[]):
     }
     try:
         visitors = [v(args) for v in modes[mode]]
-        result = do_compile(input_file, grammar_file, visitors)
+        result = do_compile(input_file, grammar_file, visitors, mode=mode)
     except CachedException:
         return True
     return result
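main() now simply forwards its mode, so each entry in the modes dictionary gets its own cache namespace and repeated compilations of an unchanged file in the same mode reuse the pickled dump. A hypothetical invocation (the import path, the file paths, and the "PO" mode name are assumptions, not defined by this patch):

# Hypothetical usage; the import path, file paths and mode name are placeholders.
from hutn_compiler.compiler import main

result = main("models/example.mvc", "grammars/example.g", "PO")   # cold run: parses, runs visitors, caches
result = main("models/example.mvc", "grammars/example.g", "PO")   # warm run: reads caches/PO_<md5>.pickle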