Browse Source

Bundle the PO constructor's set_input requests into a single batched call,
and double the number of parallel worker processes

Yentl Van Tendeloo 9 years ago
parent
commit
9d0f1805f1
2 changed files with 14 additions and 12 deletions
  1. 13 11
      interface/HUTN/hutn_compiler/primitives_object_visitor.py
  2. 1 1
      scripts/make_parallel.py

+ 13 - 11
interface/HUTN/hutn_compiler/primitives_object_visitor.py

@@ -3,6 +3,7 @@ from primitives_visitor import PrimitivesVisitor
 
 import urllib
 import urllib2
+import json
 
 class PrimitivesObjectVisitor(PrimitivesVisitor):
     def __init__(self, args):
@@ -48,32 +49,33 @@ class PrimitivesObjectVisitor(PrimitivesVisitor):
                 print("[CACHED] %s" % simple_filename)
 
     def dump(self):
+        print("DUMP")
         v = PrimitivesVisitor.dump(self)
+        data = []
 
-        import json
         # Set up interface
-        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "element_type": "V", "value": '3', "username": self.username}))).read()
+        data.append(["V", "3"])
 
         # Start uploading the code
-        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "element_type": "V", "value": '"upload"', "username": self.username}))).read()
-        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "element_type": "V", "value": '"%s"' % self.obj_file, "username": self.username}))).read()
-        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "element_type": "V", "value": '"%s"' % self.hash_file, "username": self.username}))).read()
-        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "element_type": "V", "value": 'false', "username": self.username}))).read() # Use old interface
-        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "element_type": "V", "value": json.dumps(v), "username": self.username}))).read()
+        data.append(["V", '"upload"'])
+        data.append(["V", '"%s"' % (self.obj_file)])
+        data.append(["V", '"%s"' % (self.hash_file)])
+        data.append(["V", 'false'])
+        data.append(["V", json.dumps(v)])
 
         # Upload symbol table
-        data = []
         for e, v in self.object_symbols.iteritems():
             data.append(["V", "true"])
             data.append(["V", '"%s"' % e])
             data.append(["V", "true" if v else "false"])
-        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "data": json.dumps(data), "username": self.username}))).read()
 
         # Finish the symbol table
-        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "element_type": "V", "value": 'false', "username": self.username}))).read()
+        data.append(["V", 'false'])
 
         # Wait for kernel to signal that it finished
-        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "element_type": "V", "value": '2', "username": self.username}))).read()
+        data.append(["V", '2'])
+
+        urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "set_input", "data": json.dumps(data), "username": self.username}))).read()
         v = urllib2.urlopen(urllib2.Request(self.address, urllib.urlencode({"op": "get_output", "username": self.username}))).read()
         v = v.split("=", 2)[2]
         if v == "DONE":

+ 1 - 1
scripts/make_parallel.py

@@ -19,7 +19,7 @@ def do_compile_wrapper(filename):
     do_compile(address, filename, str(random.random()), filename, "PO", ["--debug"])
 
 if __name__ == "__main__":
-    p = multiprocessing.Pool(multiprocessing.cpu_count())
+    p = multiprocessing.Pool(multiprocessing.cpu_count() * 2)
     p.map(do_compile_wrapper, files)
 
     link_and_load(address, username, files, True)