
Merge branch 'jit' into MvC

Yentl Van Tendeloo, 8 years ago
commit cc0f06e377

+ 1 - 0
.gitignore

@@ -6,3 +6,4 @@
 *.swo
 __pycache__
 hybrid_server/server.py
+perf_data.txt

BIN
bootstrap/bootstrap.m.gz


+ 37 - 28
bootstrap/bootstrap.py

@@ -108,6 +108,11 @@ def bootstrap():
                     "time": ["Float"],
                 }
 
+    jit_primitives = {
+        "get_jit_enabled": ["Boolean"],
+        "set_jit_enabled": ["Void", "Boolean"]
+    }
+
     ### Actual script to generate the file
     import os
     import sys
@@ -136,38 +141,42 @@ def bootstrap():
                     f.write('Node __%s("%s")\n' % (node, node))
                     f.write("Edge ___%s(_%s, __%s)\n" % (node, node, node))
 
-                f.write("Node primitives()\n")
-                f.write("Edge _primitives(__hierarchy, primitives)\n")
-                f.write('Node __primitives("primitives")\n')
-                f.write("Edge ___primitives(_primitives, __primitives)\n")
+                def declare_primitive_class(primitive_class_name, primitive_decls):
+                    f.write("Node %s()\n" % primitive_class_name)
+                    f.write("Edge _%s(__hierarchy, %s)\n" % (primitive_class_name, primitive_class_name))
+                    f.write('Node __%s("%s")\n' % (primitive_class_name, primitive_class_name))
+                    f.write("Edge ___%s(_%s, __%s)\n" % (primitive_class_name, primitive_class_name, primitive_class_name))
+
+                    # Define all primitive functions
+                    for function, parameters in primitive_decls.iteritems():
+                        f.write("Node _func_signature_%s()\n" % function)
+                        f.write("Node _func_params_%s()\n" % function)
+                        f.write("Node _func_body_%s()\n" % function)
+                        f.write("Edge _%s_%s(%s, _func_signature_%s)\n" % (primitive_class_name, function, primitive_class_name, function))
+                        f.write('Node _name_%s("%s")\n' % (function, function))
+                        f.write("Edge _%s_name_%s(_%s_%s, _name_%s)\n" % (primitive_class_name, function, primitive_class_name, function, function))
 
-                # Define all primitive functions
-                for function, parameters in primitives.iteritems():
-                    f.write("Node _func_signature_%s()\n" % function)
-                    f.write("Node _func_params_%s()\n" % function)
-                    f.write("Node _func_body_%s()\n" % function)
-                    f.write("Edge _primitives_%s(primitives, _func_signature_%s)\n" % (function, function))
-                    f.write('Node _name_%s("%s")\n' % (function, function))
-                    f.write("Edge _primitives_name_%s(_primitives_%s, _name_%s)\n" % (function, function, function))
+                        f.write('Node _body_%s("body")\n' % function)
+                        f.write("Edge _signature_body_%s(_func_signature_%s, _func_body_%s)\n" % (function, function, function))
+                        f.write("Edge _signature_body_str_%s(_signature_body_%s, _body_%s)\n" % (function, function, function))
 
-                    f.write('Node _body_%s("body")\n' % function)
-                    f.write("Edge _signature_body_%s(_func_signature_%s, _func_body_%s)\n" % (function, function, function))
-                    f.write("Edge _signature_body_str_%s(_signature_body_%s, _body_%s)\n" % (function, function, function))
+                        f.write('Node _params_%s("params")\n' % function)
+                        f.write("Edge _signature_params_%s(_func_signature_%s, _func_params_%s)\n" % (function, function, function))
+                        f.write("Edge _signature_params_str_%s(_signature_params_%s, _params_%s)\n" % (function, function, function))
 
-                    f.write('Node _params_%s("params")\n' % function)
-                    f.write("Edge _signature_params_%s(_func_signature_%s, _func_params_%s)\n" % (function, function, function))
-                    f.write("Edge _signature_params_str_%s(_signature_params_%s, _params_%s)\n" % (function, function, function))
+                        parameter_names = "abcdefghijklmnopqrstuvwxyz"
+                        for number, param in enumerate(parameters[1:]):
+                            param_encoding = "%s_%s" % (function, parameter_names[number])
+                            f.write("Node _func_params_%s()\n" % (param_encoding))
+                            f.write('Node _name_%s("%s")\n' % (param_encoding, parameter_names[number]))
+                            f.write("Edge _param_link_%s(_func_params_%s, _func_params_%s)\n" % (param_encoding, function, param_encoding))
+                            f.write("Edge _param_link_str_%s(_param_link_%s, _name_%s)\n" % (param_encoding, param_encoding, param_encoding))
+                            f.write('Node _name_str_%s("name")\n' % param_encoding)
+                            f.write("Edge _param_name_%s(_func_params_%s, _name_%s)\n" % (param_encoding, param_encoding, param_encoding))
+                            f.write("Edge _param_name_str_%s(_param_name_%s, _name_str_%s)\n" % (param_encoding, param_encoding, param_encoding))
 
-                    parameter_names = "abcdefghijklmnopqrstuvwxyz"
-                    for number, param in enumerate(parameters[1:]):
-                        param_encoding = "%s_%s" % (function, parameter_names[number])
-                        f.write("Node _func_params_%s()\n" % (param_encoding))
-                        f.write('Node _name_%s("%s")\n' % (param_encoding, parameter_names[number]))
-                        f.write("Edge _param_link_%s(_func_params_%s, _func_params_%s)\n" % (param_encoding, function, param_encoding))
-                        f.write("Edge _param_link_str_%s(_param_link_%s, _name_%s)\n" % (param_encoding, param_encoding, param_encoding))
-                        f.write('Node _name_str_%s("name")\n' % param_encoding)
-                        f.write("Edge _param_name_%s(_func_params_%s, _name_%s)\n" % (param_encoding, param_encoding, param_encoding))
-                        f.write("Edge _param_name_str_%s(_param_name_%s, _name_str_%s)\n" % (param_encoding, param_encoding, param_encoding))
+                declare_primitive_class('primitives', primitives)
+                declare_primitive_class('jit', jit_primitives)
 
                 # Create the initial user
                 f.write("Node user_root()\n")

+ 2 - 0
bootstrap/jit.alc

@@ -0,0 +1,2 @@
+Boolean function get_jit_enabled() = ?jit/get_jit_enabled
+Void function set_jit_enabled(a: Boolean) = ?jit/set_jit_enabled

+ 265 - 246
hybrid_server/classes/mvkcontroller.xml

@@ -1,246 +1,265 @@
-<class name="MvKController">
-    <relationships>
-        <association name="to_mvi" class="Server" min="1" max="1"/>
-    </relationships>
-    <constructor>
-        <parameter name="params"/>
-        <body>
-            <![CDATA[
-            self.mvs = ModelverseState("../bootstrap/bootstrap.m.gz")
-            # Enable Garbage Collection
-            self.mvs.GC = True
-            self.root = self.mvs.read_root()[0]
-            self.mvk = ModelverseKernel(self.root)
-            self.all_failed = False
-            self.timeout = False
-            self.init_time = time.time()
-
-            self.users = set()
-            self.input_queue = defaultdict(list)
-            self.output_queue = defaultdict(list)
-            self.source = None
-            self.port = int(sys.argv[1])
-            self.count = 0
-            
-            self.debugged_users = set()
-            self.debug_info = {}
-            self.done_something = False
-
-            self.mvs_operations = {
-                    "CN": self.mvs.create_node,
-                    "CE": self.mvs.create_edge,
-                    "CNV": self.mvs.create_nodevalue,
-                    "CD": self.mvs.create_dict,
-
-                    "RV": self.mvs.read_value,
-                    "RO": self.mvs.read_outgoing,
-                    "RI": self.mvs.read_incoming,
-                    "RE": self.mvs.read_edge,
-                    "RD": self.mvs.read_dict,
-                    "RDN": self.mvs.read_dict_node,
-                    "RDNE": self.mvs.read_dict_node_edge,
-                    "RDE": self.mvs.read_dict_edge,
-                    "RRD": self.mvs.read_reverse_dict,
-                    "RR": self.mvs.read_root,
-                    "RDK": self.mvs.read_dict_keys,
-
-                    "DE": self.mvs.delete_edge,
-                    "DN": self.mvs.delete_node,
-                }
-
-            self.execute_modelverse("", "load_primitives", [])
-            ]]>
-        </body>
-    </constructor>
-
-    <method name="execute_modelverse">
-        <parameter name="username"/>
-        <parameter name="operation"/>
-        <parameter name="params"/>
-        <body>
-            <![CDATA[
-            reply = None
-            commands = []
-            while 1:
-                commands = self.mvk.execute_yields(username, operation, params, reply)
-                if commands is None:
-                    break
-                reply = [self.mvs_operations[command[0]](*(command[1]))[0] for command in commands]
-                #for c, r in zip(commands, reply):
-                #    print("%s --> %s" % (c, r))
-            ]]>
-        </body>
-    </method>
-
-    <scxml initial="init_server">
-        <state id="init_server">
-            <onentry>
-                <raise scope="cd" event="create_instance">
-                    <parameter expr="'to_mvi'"/>
-                    <parameter expr="'Server'"/>
-                    <parameter expr="''"/>
-                    <parameter expr="self.port"/>
-                </raise>
-            </onentry>
-            <transition event="instance_created" target="../running">
-                <parameter name="instancename"/>
-                <raise scope="cd" event="start_instance">
-                    <parameter expr="instancename"/>
-                </raise>
-            </transition>
-        </state>
-
-        <parallel id="running">
-            <state id="wait_for_requests">
-                <state id="wait">
-                    <transition event="from_mvi" target=".">
-                        <parameter name="source"/>
-                        <parameter name="data"/>
-                        <script>
-                            # No JSON encoding necessary, as it is not complex
-                            try:
-                                self.done_something = False
-                                if data["op"] == "set_input":
-                                    if "value" in data:
-                                        value = [json.loads(data["value"])]
-                                    else:
-                                        value = json.loads(data["data"])
-                                    self.input_queue[data["username"]].append((source, value))
-                                elif data["op"] == "get_output":
-                                    self.output_queue[data["username"]].append(source)
-                                elif data["op"] == "attach_debugger":
-                                    self.debugged_users.add(data["username"])
-                                    self.done_something = True
-                                    self.source = source
-                                    self.debug_info[data["username"]] = {'state': 'running', 'breakpoints': []}
-                                elif data["op"] == "detach_debugger":
-                                    self.debugged_users.discard(data["username"])
-                                    self.done_something = True
-                                    self.source = source
-                                    del self.debug_info[data["username"]]
-                                elif data["op"] == "pause":
-                                    if data["username"] in self.debugged_users:
-                                        self.debug_info[data["username"]]['state'] = 'paused'
-                                    self.done_something = True
-                                    self.source = source
-                                elif data["op"] == "resume":
-                                    if data["username"] in self.debugged_users:
-                                        self.debug_info[data["username"]]['state'] = 'running'
-                                    self.done_something = True
-                                    self.source = source
-                                elif data["op"] == "step_over":
-                                    pass
-                                    self.done_something = True
-                                    self.source = source
-                                elif data["op"] == "step_into":
-                                    pass
-                                    self.done_something = True
-                                    self.source = source
-                                else:
-                                    raise Exception("DROPPING unknown operation: " + str(data["op"]))
-                            except ValueError:
-                                print("Error when deserializing request: " + str(data))
-                                raise
-                        </script>
-                    </transition>
-
-                    <transition cond="self.done_something" target=".">
-                        <raise event="HTTP_input" scope="narrow" target="'to_mvi/%s' % self.source">
-                            <parameter expr="json.dumps(True)"/>
-                        </raise>
-                        <script>
-                            self.done_something = False
-                        </script>
-                    </transition>
-                </state>
-            </state>
-
-            <state id="execution">
-                <state id="execution">
-                    <onentry>
-                        <script>
-                            self.timeout = False
-                            self.destination = None
-                            if self.users:
-                                user = self.users.pop()
-                                if not user in self.debugged_users or self.debug_info[user]['state'] == 'running':
-                                    # Check if there are values to input
-                                    if self.input_queue[user]:
-                                        source, args = self.input_queue[user].pop(0)
-                                        for args_entry in args:
-                                            self.execute_modelverse(user, "set_input", [args_entry])
-
-                                        self.destination = source
-                                        self.value = "OK"
-                                        self.all_failed = False
-
-                                    nr_of_steps = 1 if user in self.debugged_users else 100
-                                    # Now process for some steps, or until we are again blocked for input
-                                    for x in xrange(nr_of_steps):
-                                        self.execute_modelverse(user, "execute_rule", [])
-
-                                        if not self.mvk.success:
-                                            # Blocking or broken, so quit already to stop wasting CPU
-                                            break
-
-                                        # Could at least execute one instruction, so mark it as "not failed"
-                                        self.all_failed = False
-
-                                    # Check that we don't have anything to output yet, otherwise we wait
-                                    if self.destination is None:
-                                        # Perform output if there is anything
-                                        if self.output_queue[user]:
-                                            self.execute_modelverse(user, "get_output", [])
-                                            if self.mvk.success:
-                                                self.destination = self.output_queue[user].pop(0)
-                                                self.value = self.mvk.returnvalue
-                                                self.all_failed = False
-
-                            else:
-                                if self.count >= 2000:
-                                    self.count = 0
-                                    self.mvs.purge()
-                                else:
-                                    self.count += 1
-                                    self.mvs.garbage_collect()
-                                out = self.mvs.read_outgoing(self.root)[0]
-                                for m in out:
-                                    src, user = self.mvs.read_edge(m)[0]
-                                    outgoing = self.mvs.read_outgoing(m)[0]
-                                    first = self.mvs.read_edge(outgoing[0])[0]
-                                    dest = first[1]
-                                    name = self.mvs.read_value(dest)[0]
-                                    if name.startswith("__"):
-                                        continue
-                                    self.users.add(name)
-                                self.timeout = self.all_failed
-                                self.all_failed = True
-                        </script>
-                    </onentry>
-
-                    <transition cond="self.destination is not None" after="self.sccd_yield()" target=".">
-                        <raise event="HTTP_input" scope="narrow" target="'to_mvi/%s' % self.destination">
-                            <parameter expr="json.dumps(self.value)"/>
-                        </raise>
-                    </transition>
-
-                    <transition cond="self.timeout and self.destination is None" after="self.sccd_yield() + 0.10" target="."/>
-
-                    <transition cond="not self.timeout and self.destination is None" after="self.sccd_yield()" target="."/>
-                </state>
-            </state>
-
-            <state id="remove_sockets">
-                <state id="remove_sockets">
-                    <transition event="delete_socket" target=".">
-                        <parameter name="socket"/>
-                        <script>
-                            for user in self.output_queue.keys():
-                                self.output_queue[user] = [s for s in self.output_queue[user] if s != socket]
-                        </script>
-                    </transition>
-                </state>
-            </state>
-        </parallel>
-    </scxml>
-</class>
+<class name="MvKController">
+    <relationships>
+        <association name="to_mvi" class="Server" min="1" max="1"/>
+    </relationships>
+    <constructor>
+        <parameter name="params"/>
+        <body>
+            <![CDATA[
+            self.mvs = ModelverseState("../bootstrap/bootstrap.m.gz")
+            # Enable Garbage Collection
+            self.mvs.GC = True
+            self.root = self.mvs.read_root()[0]
+
+            # Instantiate the kernel.
+            default_kernel_type = 'baseline-jit'
+            kernel_type = default_kernel_type
+            for parameter in params:
+                if parameter.startswith('--kernel='):
+                    kernel_type = parameter[len('--kernel='):]
+
+            if kernel_type == 'legacy-interpreter':
+                self.mvk = LegacyModelverseKernel(self.root)
+            elif kernel_type == 'interpreter':
+                self.mvk = ModelverseKernel(self.root)
+                self.mvk.jit.set_jit_enabled(False)
+            else:
+                if kernel_type != default_kernel_type:
+                    print(
+                        "warning: unknown kernel type '%s'. Defaulting to '%s'."
+                        % (kernel_type, default_kernel_type))
+                self.mvk = ModelverseKernel(self.root)
+                
+            self.all_failed = False
+            self.timeout = False
+            self.init_time = time.time()
+
+            self.users = set()
+            self.input_queue = defaultdict(list)
+            self.output_queue = defaultdict(list)
+            self.source = None
+            self.port = int(sys.argv[1])
+            self.count = 0
+            
+            self.debugged_users = set()
+            self.debug_info = {}
+            self.done_something = False
+
+            self.mvs_operations = {
+                    "CN": self.mvs.create_node,
+                    "CE": self.mvs.create_edge,
+                    "CNV": self.mvs.create_nodevalue,
+                    "CD": self.mvs.create_dict,
+
+                    "RV": self.mvs.read_value,
+                    "RO": self.mvs.read_outgoing,
+                    "RI": self.mvs.read_incoming,
+                    "RE": self.mvs.read_edge,
+                    "RD": self.mvs.read_dict,
+                    "RDN": self.mvs.read_dict_node,
+                    "RDNE": self.mvs.read_dict_node_edge,
+                    "RDE": self.mvs.read_dict_edge,
+                    "RRD": self.mvs.read_reverse_dict,
+                    "RR": self.mvs.read_root,
+                    "RDK": self.mvs.read_dict_keys,
+
+                    "DE": self.mvs.delete_edge,
+                    "DN": self.mvs.delete_node,
+                }
+
+            self.execute_modelverse("", "load_primitives", [])
+            ]]>
+        </body>
+    </constructor>
+
+    <method name="execute_modelverse">
+        <parameter name="username"/>
+        <parameter name="operation"/>
+        <parameter name="params"/>
+        <body>
+            <![CDATA[
+            reply = None
+            commands = []
+            while 1:
+                commands = self.mvk.execute_yields(username, operation, params, reply)
+                if commands is None:
+                    break
+                reply = [self.mvs_operations[command[0]](*(command[1]))[0] for command in commands]
+                #for c, r in zip(commands, reply):
+                #    print("%s --> %s" % (c, r))
+            ]]>
+        </body>
+    </method>
+
+    <scxml initial="init_server">
+        <state id="init_server">
+            <onentry>
+                <raise scope="cd" event="create_instance">
+                    <parameter expr="'to_mvi'"/>
+                    <parameter expr="'Server'"/>
+                    <parameter expr="''"/>
+                    <parameter expr="self.port"/>
+                </raise>
+            </onentry>
+            <transition event="instance_created" target="../running">
+                <parameter name="instancename"/>
+                <raise scope="cd" event="start_instance">
+                    <parameter expr="instancename"/>
+                </raise>
+            </transition>
+        </state>
+
+        <parallel id="running">
+            <state id="wait_for_requests">
+                <state id="wait">
+                    <transition event="from_mvi" target=".">
+                        <parameter name="source"/>
+                        <parameter name="data"/>
+                        <script>
+                            # No JSON encoding necessary, as it is not complex
+                            try:
+                                self.done_something = False
+                                if data["op"] == "set_input":
+                                    if "value" in data:
+                                        value = [json.loads(data["value"])]
+                                    else:
+                                        value = json.loads(data["data"])
+                                    self.input_queue[data["username"]].append((source, value))
+                                elif data["op"] == "get_output":
+                                    self.output_queue[data["username"]].append(source)
+                                elif data["op"] == "attach_debugger":
+                                    self.debugged_users.add(data["username"])
+                                    self.done_something = True
+                                    self.source = source
+                                    self.debug_info[data["username"]] = {'state': 'running', 'breakpoints': []}
+                                elif data["op"] == "detach_debugger":
+                                    self.debugged_users.discard(data["username"])
+                                    self.done_something = True
+                                    self.source = source
+                                    del self.debug_info[data["username"]]
+                                elif data["op"] == "pause":
+                                    if data["username"] in self.debugged_users:
+                                        self.debug_info[data["username"]]['state'] = 'paused'
+                                    self.done_something = True
+                                    self.source = source
+                                elif data["op"] == "resume":
+                                    if data["username"] in self.debugged_users:
+                                        self.debug_info[data["username"]]['state'] = 'running'
+                                    self.done_something = True
+                                    self.source = source
+                                elif data["op"] == "step_over":
+                                    pass
+                                    self.done_something = True
+                                    self.source = source
+                                elif data["op"] == "step_into":
+                                    pass
+                                    self.done_something = True
+                                    self.source = source
+                                else:
+                                    raise Exception("DROPPING unknown operation: " + str(data["op"]))
+                            except ValueError:
+                                print("Error when deserializing request: " + str(data))
+                                raise
+                        </script>
+                    </transition>
+
+                    <transition cond="self.done_something" target=".">
+                        <raise event="HTTP_input" scope="narrow" target="'to_mvi/%s' % self.source">
+                            <parameter expr="json.dumps(True)"/>
+                        </raise>
+                        <script>
+                            self.done_something = False
+                        </script>
+                    </transition>
+                </state>
+            </state>
+
+            <state id="execution">
+                <state id="execution">
+                    <onentry>
+                        <script>
+                            self.timeout = False
+                            self.destination = None
+                            if self.users:
+                                user = self.users.pop()
+                                if not user in self.debugged_users or self.debug_info[user]['state'] == 'running':
+                                    # Check if there are values to input
+                                    if self.input_queue[user]:
+                                        source, args = self.input_queue[user].pop(0)
+                                        for args_entry in args:
+                                            self.execute_modelverse(user, "set_input", [args_entry])
+
+                                        self.destination = source
+                                        self.value = "OK"
+                                        self.all_failed = False
+
+                                    nr_of_steps = 1 if user in self.debugged_users else 100
+                                    # Now process for some steps, or until we are again blocked for input
+                                    for x in xrange(nr_of_steps):
+                                        self.execute_modelverse(user, "execute_rule", [])
+
+                                        if not self.mvk.success:
+                                            # Blocking or broken, so quit already to stop wasting CPU
+                                            break
+
+                                        # Could at least execute one instruction, so mark it as "not failed"
+                                        self.all_failed = False
+
+                                    # Check that we don't have anything to output yet, otherwise we wait
+                                    if self.destination is None:
+                                        # Perform output if there is anything
+                                        if self.output_queue[user]:
+                                            self.execute_modelverse(user, "get_output", [])
+                                            if self.mvk.success:
+                                                self.destination = self.output_queue[user].pop(0)
+                                                self.value = self.mvk.returnvalue
+                                                self.all_failed = False
+
+                            else:
+                                if self.count >= 2000:
+                                    self.count = 0
+                                    self.mvs.purge()
+                                else:
+                                    self.count += 1
+                                    self.mvs.garbage_collect()
+                                out = self.mvs.read_outgoing(self.root)[0]
+                                for m in out:
+                                    src, user = self.mvs.read_edge(m)[0]
+                                    outgoing = self.mvs.read_outgoing(m)[0]
+                                    first = self.mvs.read_edge(outgoing[0])[0]
+                                    dest = first[1]
+                                    name = self.mvs.read_value(dest)[0]
+                                    if name.startswith("__"):
+                                        continue
+                                    self.users.add(name)
+                                self.timeout = self.all_failed
+                                self.all_failed = True
+                        </script>
+                    </onentry>
+
+                    <transition cond="self.destination is not None" after="self.sccd_yield()" target=".">
+                        <raise event="HTTP_input" scope="narrow" target="'to_mvi/%s' % self.destination">
+                            <parameter expr="json.dumps(self.value)"/>
+                        </raise>
+                    </transition>
+
+                    <transition cond="self.timeout and self.destination is None" after="self.sccd_yield() + 0.10" target="."/>
+
+                    <transition cond="not self.timeout and self.destination is None" after="self.sccd_yield()" target="."/>
+                </state>
+            </state>
+
+            <state id="remove_sockets">
+                <state id="remove_sockets">
+                    <transition event="delete_socket" target=".">
+                        <parameter name="socket"/>
+                        <script>
+                            for user in self.output_queue.keys():
+                                self.output_queue[user] = [s for s in self.output_queue[user] if s != socket]
+                        </script>
+                    </transition>
+                </state>
+            </state>
+        </parallel>
+    </scxml>
+</class>
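
The kernel-selection code added to the constructor is essentially a small flag parser plus a fallback. Extracted as a standalone sketch (hypothetical helper name; same behaviour as the inline code above, where the last --kernel= flag wins and unknown values fall back to the default with a warning):

    def pick_kernel_type(params, default_kernel_type='baseline-jit'):
        # Scan the constructor parameters for a --kernel=<name> flag.
        kernel_type = default_kernel_type
        for parameter in params:
            if parameter.startswith('--kernel='):
                kernel_type = parameter[len('--kernel='):]
        return kernel_type

    # pick_kernel_type(['--kernel=legacy-interpreter']) == 'legacy-interpreter'
    # pick_kernel_type(['--kernel=interpreter'])        == 'interpreter'
    # pick_kernel_type([])                              == 'baseline-jit'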

+ 1 - 0
hybrid_server/server.xml

@@ -16,6 +16,7 @@
         sys.path.append("../kernel/")
         sys.path.append("../state/")
         from modelverse_kernel.main import ModelverseKernel
+        from modelverse_kernel.legacy import ModelverseKernel as LegacyModelverseKernel
         from modelverse_state.main import ModelverseState
         #from modelverse_state.rdf import ModelverseState
     </top>

+ 6 - 15
integration/utils.py

@@ -19,25 +19,16 @@ from check_objects import to_recompile
 username = "test_user"
 parallel_push = True
 
-ports = []
+ports = set()
 
 def getFreePort():
+    """Gets a unique new port."""
     while 1:
         port = random.randint(10000, 20000)
-        ports.append(port)
-
-        exists = False
-        for p in ports:
-            if p == port:
-                if not exists:
-                    # We have hopefully found our own
-                    exists = True
-                else:
-                    # We seem to be the second entry, so chose another one
-                    ports.remove(port)
-                    break
-        else:
-            # Didn't find a duplicate
+        # Check if this port is in the set of ports.
+        if port not in ports:
+            # We have found a unique port. Add it to the set and return.
+            ports.add(port)
             return port
 
 def execute(scriptname, parameters=[], wait=False):
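
A minimal usage sketch (hypothetical test snippet; assumes getFreePort and the module-level ports set are imported from integration/utils.py): each call draws a random port in [10000, 20000] and records it, so the same port is never handed out twice within one process.

    p1 = getFreePort()
    p2 = getFreePort()
    assert p1 != p2                     # distinct within this process
    assert p1 in ports and p2 in ports  # both recorded in the module-level set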

+ 2 - 0
interface/HUTN/includes/jit.alh

@@ -0,0 +1,2 @@
+Boolean function get_jit_enabled()
+Void function set_jit_enabled(a: Boolean)

+ 0 - 0
kernel/modelverse_jit/__init__.py


+ 302 - 0
kernel/modelverse_jit/intrinsics.py

@@ -0,0 +1,302 @@
+import jit
+import tree_ir
+import time
+
+BINARY_INTRINSICS = {
+    'value_eq' : '==',
+    'value_neq' : '!=',
+
+    'bool_and' : 'and',
+    'bool_or' : 'or',
+
+    'integer_addition' : '+',
+    'integer_subtraction' : '-',
+    'integer_multiplication' : '*',
+    'integer_division' : '/',
+    'integer_gt' : '>',
+    'integer_gte' : '>=',
+    'integer_lt' : '<',
+    'integer_lte' : '<=',
+
+    'float_addition' : '+',
+    'float_subtraction' : '-',
+    'float_multiplication' : '*',
+    'float_division' : '/',
+    'float_gt' : '>',
+    'float_gte' : '>=',
+    'float_lt' : '<',
+    'float_lte' : '<='
+}
+
+UNARY_INTRINSICS = {
+    'bool_not' : 'not',
+    'integer_neg' : '-',
+    'float_neg' : '-'
+}
+
+CAST_INTRINSICS = {
+    'cast_i2f' : float,
+    'cast_i2s' : str,
+    'cast_i2b' : bool,
+    'cast_f2i' : int,
+    'cast_f2s' : str,
+    'cast_f2b' : bool,
+    'cast_s2i' : int,
+    'cast_s2f' : float,
+    'cast_s2b' : bool,
+    'cast_b2i' : int,
+    'cast_b2f' : float,
+    'cast_b2s' : str
+}
+
+def create_get_length(expression):
+    """Creates an expression that evaluates the given expression, and then
+       computes the length of its result."""
+    return tree_ir.CallInstruction(
+        tree_ir.LoadGlobalInstruction('len'),
+        [expression])
+
+# Don't complain about the variable names, pylint. It's important that we
+# get them right.
+# pylint: disable=I0011,C0103
+def __set_add(a, b):
+    tmp = tree_ir.StoreLocalInstruction(None, a)
+    return tree_ir.create_block(
+        tmp,
+        tree_ir.CreateEdgeInstruction(tmp.create_load(), b),
+        tmp.create_load())
+
+def __dict_add(a, b, c):
+    a_tmp = tree_ir.StoreLocalInstruction(None, a)
+    b_tmp = tree_ir.StoreLocalInstruction(None, b)
+    return tree_ir.create_block(
+        a_tmp,
+        b_tmp,
+        tree_ir.CreateEdgeInstruction(
+            tree_ir.CreateEdgeInstruction(a_tmp.create_load(), c),
+            b_tmp.create_load()),
+        a_tmp.create_load())
+
+def __list_read(a, b):
+    # The statements in this function generate the following code:
+    #
+    # a_tmp = a # To make sure a is evaluated before b.
+    # b_value, = yield [("RV", [b])]
+    # result, = yield [("RD", [a_tmp, b_value])]
+    # if result is None:
+    #     raise Exception("List read out of bounds: %s" % b_value)
+    # result
+
+    a_tmp = tree_ir.StoreLocalInstruction(None, a)
+    b_val = tree_ir.StoreLocalInstruction(
+        None,
+        tree_ir.ReadValueInstruction(b))
+    result = tree_ir.StoreLocalInstruction(
+        None,
+        tree_ir.ReadDictionaryValueInstruction(
+            a_tmp.create_load(), b_val.create_load()))
+
+    return tree_ir.create_block(
+        a_tmp,
+        b_val,
+        result,
+        tree_ir.SelectInstruction(
+            tree_ir.BinaryInstruction(
+                result.create_load(),
+                'is',
+                tree_ir.LiteralInstruction(None)),
+            tree_ir.RaiseInstruction(
+                tree_ir.CallInstruction(
+                    tree_ir.LoadGlobalInstruction('Exception'),
+                    [tree_ir.BinaryInstruction(
+                        tree_ir.LiteralInstruction('List read out of bounds: %s'),
+                        '%',
+                        b_val.create_load())])),
+            tree_ir.NopInstruction()),
+        result.create_load())
+
+def __list_append(a, b):
+    # We want to generate code that is more or less equivalent to:
+    #
+    # a_tmp = a
+    # b_tmp = b
+    # a_outgoing, = yield [("RO", [a_tmp])]
+    # _ = yield [("CD", [a_tmp, len(a_outgoing), b_tmp])]
+    # a
+
+    a_tmp = tree_ir.StoreLocalInstruction(None, a)
+    b_tmp = tree_ir.StoreLocalInstruction(None, b)
+    return tree_ir.create_block(
+        a_tmp,
+        tree_ir.CreateDictionaryEdgeInstruction(
+            a_tmp.create_load(),
+            create_get_length(
+                tree_ir.ReadOutgoingEdgesInstruction(
+                    a_tmp.create_load())),
+            b_tmp),
+        a_tmp.create_load())
+
+def __log(a):
+    # Original definition:
+    #
+    # def log(a, **remainder):
+    #     a_value, = yield [("RV", [a])]
+    #     print("== LOG == " + str(a_value))
+    #     raise PrimitiveFinished(a)
+
+    a_tmp = tree_ir.StoreLocalInstruction(None, a)
+    return tree_ir.CompoundInstruction(
+        tree_ir.create_block(
+            a_tmp,
+            tree_ir.PrintInstruction(
+                tree_ir.BinaryInstruction(
+                    tree_ir.LiteralInstruction("== LOG == "),
+                    '+',
+                    tree_ir.CallInstruction(
+                        tree_ir.LoadGlobalInstruction('str'),
+                        [tree_ir.ReadValueInstruction(a_tmp.create_load())])))),
+        a_tmp.create_load())
+
+MISC_INTRINSICS = {
+    # Reference equality
+    'element_eq' :
+        lambda a, b:
+        tree_ir.CreateNodeWithValueInstruction(
+            tree_ir.BinaryInstruction(a, '==', b)),
+    'element_neq' :
+        lambda a, b:
+        tree_ir.CreateNodeWithValueInstruction(
+            tree_ir.BinaryInstruction(a, '!=', b)),
+
+    # Strings
+    'string_get' :
+        lambda a, b:
+        tree_ir.CreateNodeWithValueInstruction(
+            tree_ir.LoadIndexInstruction(
+                tree_ir.ReadValueInstruction(a),
+                tree_ir.ReadValueInstruction(b))),
+    'string_len' :
+        lambda a:
+        tree_ir.CreateNodeWithValueInstruction(
+            tree_ir.CallInstruction(
+                tree_ir.LoadGlobalInstruction('len'),
+                [tree_ir.ReadValueInstruction(a)])),
+    'string_join' :
+        lambda a, b:
+        tree_ir.CreateNodeWithValueInstruction(
+            tree_ir.BinaryInstruction(
+                tree_ir.CallInstruction(
+                    tree_ir.LoadGlobalInstruction('str'),
+                    [tree_ir.ReadValueInstruction(a)]),
+                '+',
+                tree_ir.CallInstruction(
+                    tree_ir.LoadGlobalInstruction('str'),
+                    [tree_ir.ReadValueInstruction(b)]))),
+    'string_startswith' :
+        lambda a, b:
+        tree_ir.CreateNodeWithValueInstruction(
+            tree_ir.CallInstruction(
+                tree_ir.LoadMemberInstruction(
+                    tree_ir.ReadValueInstruction(a),
+                    'startswith'),
+                [tree_ir.ReadValueInstruction(b)])),
+
+    # State creation
+    'create_node' : tree_ir.CreateNodeInstruction,
+    'create_edge' :
+        # Lambda is totally necessary here, pylint.
+        # You totally dropped the ball on this one.
+        # pylint: disable=I0011,W0108
+        lambda a, b:
+        tree_ir.CreateEdgeInstruction(a, b),
+    'create_value' :
+        lambda a:
+        tree_ir.CreateNodeWithValueInstruction(
+            tree_ir.ReadValueInstruction(a)),
+
+    # State reads
+    'read_edge_src' :
+        lambda a:
+        tree_ir.LoadIndexInstruction(
+            tree_ir.ReadEdgeInstruction(a),
+            tree_ir.LiteralInstruction(0)),
+    'read_edge_dst' :
+        lambda a:
+        tree_ir.LoadIndexInstruction(
+            tree_ir.ReadEdgeInstruction(a),
+            tree_ir.LiteralInstruction(1)),
+    'is_edge' :
+        lambda a:
+        tree_ir.CreateNodeWithValueInstruction(
+            tree_ir.BinaryInstruction(
+                tree_ir.LoadIndexInstruction(
+                    tree_ir.ReadEdgeInstruction(a),
+                    tree_ir.LiteralInstruction(0)),
+                'is not',
+                tree_ir.LiteralInstruction(None))),
+
+    # read_root
+    'read_root' :
+        lambda:
+        tree_ir.LoadIndexInstruction(
+            tree_ir.LoadLocalInstruction(jit.KWARGS_PARAMETER_NAME),
+            tree_ir.LiteralInstruction('root')),
+
+    # read_userroot
+    'read_userroot' :
+        lambda:
+        tree_ir.LoadIndexInstruction(
+            tree_ir.LoadLocalInstruction(jit.KWARGS_PARAMETER_NAME),
+            tree_ir.LiteralInstruction('user_root')),
+
+    # Dictionary operations
+    'dict_read' :
+        lambda a, b:
+        tree_ir.ReadDictionaryValueInstruction(
+            a, tree_ir.ReadValueInstruction(b)),
+
+    'dict_read_edge' :
+        lambda a, b:
+        tree_ir.ReadDictionaryEdgeInstruction(
+            a, tree_ir.ReadValueInstruction(b)),
+
+    'dict_add' : __dict_add,
+
+    # Set operations
+    'set_add' : __set_add,
+
+    # List operations
+    'list_len' :
+        lambda a:
+        tree_ir.CreateNodeWithValueInstruction(
+            create_get_length(tree_ir.ReadOutgoingEdgesInstruction(a))),
+
+    'list_read' : __list_read,
+    'list_append' : __list_append,
+
+    # log
+    'log' : __log
+}
+
+def register_time_intrinsic(target_jit):
+    """Registers the time() intrinsic with the given JIT."""
+    import_name = target_jit.import_value(time.time, 'time')
+    target_jit.register_intrinsic(
+        'time',
+        lambda: tree_ir.CreateNodeWithValueInstruction(
+            tree_ir.CallInstruction(
+                tree_ir.LoadGlobalInstruction(import_name),
+                [])))
+
+def register_intrinsics(target_jit):
+    """Registers all intrinsics in the module with the given JIT."""
+    for (key, value) in BINARY_INTRINSICS.items():
+        target_jit.register_binary_intrinsic(key, value)
+    for (key, value) in UNARY_INTRINSICS.items():
+        target_jit.register_unary_intrinsic(key, value)
+    for (key, value) in CAST_INTRINSICS.items():
+        target_jit.register_cast_intrinsic(key, value)
+    for (key, value) in MISC_INTRINSICS.items():
+        target_jit.register_intrinsic(key, value)
+
+    register_time_intrinsic(target_jit)
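
Wiring this module into a JIT instance is a single call; a sketch, assuming a JIT object that exposes the register_* and import_value methods used above (the concrete class lives in kernel/modelverse_jit/jit.py, whose diff is suppressed below):

    import modelverse_jit.intrinsics as intrinsics

    # my_jit is assumed to provide register_intrinsic, register_binary_intrinsic,
    # register_unary_intrinsic, register_cast_intrinsic and import_value.
    intrinsics.register_intrinsics(my_jit)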

File diff suppressed because it is too large
+ 999 - 0
kernel/modelverse_jit/jit.py


+ 16 - 0
kernel/modelverse_jit/jit_primitives.py

@@ -0,0 +1,16 @@
+"""Primitive functions that interface with the Modelverse JIT."""
+
+import modelverse_kernel.primitives as primitive_functions
+
+# pylint: disable=I0011,C0103
+
+def get_jit_enabled(**kwargs):
+    """Checks if the JIT is enabled."""
+    result, = yield [("CNV", [kwargs['mvk'].jit.jit_enabled])]
+    raise primitive_functions.PrimitiveFinished(result)
+
+def set_jit_enabled(a, **kwargs):
+    """Enables or disables the JIT."""
+    val, result = yield [("RV", [a]), ("CN", [])]
+    kwargs['mvk'].jit.set_jit_enabled(val)
+    raise primitive_functions.PrimitiveFinished(result)
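
These primitives follow the kernel's generator protocol: they yield lists of (request, arguments) pairs and report their return value by raising PrimitiveFinished. A simplified driver loop, in the spirit of execute_modelverse in mvkcontroller.xml above, would look roughly like this (sketch only; the real dispatch lives in the kernel, not here):

    from modelverse_kernel.primitives import PrimitiveFinished

    def run_primitive(primitive, state_operations, **kwargs):
        # Drive a yield-based primitive until it raises PrimitiveFinished.
        generator = primitive(**kwargs)
        reply = None
        try:
            while True:
                requests = generator.send(reply)
                # Each request is an (opcode, args) pair, e.g. ("CNV", [True]);
                # state_operations maps opcodes to ModelverseState methods,
                # like the mvs_operations table shown earlier.
                reply = [state_operations[op](*args)[0] for op, args in requests]
        except PrimitiveFinished as finished:
            return finished.result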

+ 105 - 0
kernel/modelverse_jit/runtime.py

@@ -0,0 +1,105 @@
+import modelverse_kernel.primitives as primitive_functions
+
+class JitCompilationFailedException(Exception):
+    """A type of exception that is raised when the jit fails to compile a function."""
+    pass
+
+def call_function(function_id, named_arguments, **kwargs):
+    """Runs the function with the given id, passing it the specified argument dictionary."""
+    user_root = kwargs['user_root']
+    kernel = kwargs['mvk']
+    body_id, = yield [("RD", [function_id, "body"])]
+    kernel.jit.mark_entry_point(body_id)
+
+    # Try to jit the function here. We might be able to avoid building the stack
+    # frame.
+    def handle_jit_failed(_):
+        interpreter_args = {'function_id' : function_id, 'named_arguments' : named_arguments}
+        interpreter_args.update(kwargs)
+        yield [("TAIL_CALL_KWARGS", [interpret_function, interpreter_args])]
+
+    yield [("TRY", [])]
+    yield [("CATCH", [JitCompilationFailedException, handle_jit_failed])]
+    # Try to compile.
+    compiled_func, = yield [("CALL_ARGS", [kernel.jit_compile, (user_root, body_id)])]
+    yield [("END_TRY", [])]
+    # Add the keyword arguments to the argument dictionary.
+    named_arguments.update(kwargs)
+    # Run the function.
+    yield [("TAIL_CALL_KWARGS", [compiled_func, named_arguments])]
+
+def interpret_function(function_id, named_arguments, **kwargs):
+    """Makes the interpreter run the function with the given id for the specified
+       argument dictionary."""
+    user_root = kwargs['user_root']
+    kernel = kwargs['mvk']
+    user_frame, = yield [("RD", [user_root, "frame"])]
+    inst, body_id = yield [("RD", [user_frame, "IP"]), ("RD", [function_id, "body"])]
+    kernel.jit.mark_entry_point(body_id)
+
+    # Create a new stack frame.
+    frame_link, new_phase, new_frame, new_evalstack, new_symbols, \
+        new_returnvalue, intrinsic_return = \
+                    yield [("RDE", [user_root, "frame"]),
+                           ("CNV", ["init"]),
+                           ("CN", []),
+                           ("CN", []),
+                           ("CN", []),
+                           ("CN", []),
+                           ("CN", [])
+                          ]
+
+    _, _, _, _, _, _, _, _, _, _ = \
+                    yield [("CD", [user_root, "frame", new_frame]),
+                           ("CD", [new_frame, "evalstack", new_evalstack]),
+                           ("CD", [new_frame, "symbols", new_symbols]),
+                           ("CD", [new_frame, "returnvalue", new_returnvalue]),
+                           ("CD", [new_frame, "caller", inst]),
+                           ("CD", [new_frame, "phase", new_phase]),
+                           ("CD", [new_frame, "IP", body_id]),
+                           ("CD", [new_frame, "prev", user_frame]),
+                           ("CD", [
+                               new_frame,
+                               primitive_functions.EXCEPTION_RETURN_KEY,
+                               intrinsic_return]),
+                           ("DE", [frame_link])
+                          ]
+
+    # Put the parameters in the new stack frame's symbol table.
+    (parameter_vars, parameter_names), = yield [
+        ("CALL_ARGS", [kernel.jit.jit_parameters, (body_id,)])]
+    parameter_dict = dict(zip(parameter_names, parameter_vars))
+
+    for (key, value) in named_arguments.items():
+        param_var = parameter_dict[key]
+        variable, = yield [("CN", [])]
+        yield [("CD", [variable, "value", value])]
+        symbol_edge, = yield [("CE", [new_symbols, variable])]
+        yield [("CE", [symbol_edge, param_var])]
+
+    username = kwargs['username']
+    def exception_handler(ex):
+        # print('Returning from interpreted function. Result: %s' % ex.result)
+        raise primitive_functions.PrimitiveFinished(ex.result)
+
+    # Create an exception handler to catch and translate InterpretedFunctionFinished.
+    yield [("TRY", [])]
+    yield [("CATCH", [primitive_functions.InterpretedFunctionFinished, exception_handler])]
+    while 1:
+        result, = yield [("CALL_ARGS", [kernel.execute_rule, (username,)])]
+        # An instruction has completed. Forward it.
+        yield result
+
+def get_input(**parameters):
+    """Retrieves input."""
+    mvk = parameters["mvk"]
+    user_root = parameters["user_root"]
+    while 1:
+        yield [("CALL_ARGS", [mvk.input_init, (user_root,)])]
+        # Finished
+        if mvk.success:
+            # Got some input, so we can access it
+            raise primitive_functions.PrimitiveFinished(mvk.input_value)
+        else:
+            # No input, so yield None but don't stop
+            yield None

File diff suppressed because it is too large
+ 1482 - 0
kernel/modelverse_jit/tree_ir.py


+ 11 - 46
kernel/modelverse_kernel/compiled.py

@@ -1,4 +1,5 @@
 from modelverse_kernel.primitives import PrimitiveFinished
+import modelverse_jit.runtime as jit_runtime 
 
 def reverseKeyLookup(a, b, **remainder):
     edges, = yield [("RO", [a])]
@@ -340,16 +341,10 @@ def selectPossibleIncoming(a, b, c, **remainder):
     name_values =   yield [("RV", [i]) for i in limit_set_names]
     limit_set =     yield [("RD", [model_dict, i]) for i in name_values]
 
-    try:
-        gen = get_superclasses(a, b)
-        inp = None
-        while 1:
-            inp =   yield gen.send(inp)
-    except PrimitiveFinished as e:
-        superclasses = e.result
-        vals, = yield [("RO", [superclasses])]
-        superclasses = yield [("RE", [i]) for i in vals]
-        superclasses = [i[1] for i in superclasses]
+    superclasses, = yield [("CALL_ARGS", [get_superclasses, (a, b)])]
+    vals, = yield [("RO", [superclasses])]
+    superclasses = yield [("RE", [i]) for i in vals]
+    superclasses = [i[1] for i in superclasses]
 
     superclass_names = yield [("RV", [i]) for i in superclasses]
     elems =         yield [("RD", [model_dict, i]) for i in superclass_names]
@@ -373,16 +368,10 @@ def selectPossibleOutgoing(a, b, c, **remainder):
     name_values =   yield [("RV", [i]) for i in limit_set_names]
     limit_set =     yield [("RD", [model_dict, i]) for i in name_values]
 
-    try:
-        gen = get_superclasses(a, b)
-        inp = None
-        while 1:
-            inp =  yield gen.send(inp)
-    except PrimitiveFinished as e:
-        superclasses = e.result
-        vals, = yield [("RO", [superclasses])]
-        superclasses = yield [("RE", [i]) for i in vals]
-        superclasses = [i[1] for i in superclasses]
+    superclasses, = yield [("CALL_ARGS", [get_superclasses, (a, b)])]
+    vals, = yield [("RO", [superclasses])]
+    superclasses = yield [("RE", [i]) for i in vals]
+    superclasses = [i[1] for i in superclasses]
 
     superclass_names = yield [("RV", [i]) for i in superclasses]
     elems =         yield [("RD", [model_dict, i]) for i in superclass_names]
@@ -437,13 +426,7 @@ def construct_const(**remainder):
     v, = yield [("CNV", [{"value": "constant"}])]
 
     # Get input: keep trying until we get something
-    try:
-        gen = __get_input(remainder)
-        inp = None
-        while 1:
-            inp = yield gen.send(inp)
-    except PrimitiveFinished as e:
-        inp = e.result
+    inp, = yield [("CALL_KWARGS", [jit_runtime.get_input, remainder])]
 
     yield [("CD", [v, "node", inp])]
 
@@ -469,22 +452,4 @@ def retype(a, b, c, **remainder):
         yield [("DE", [prev_edge])]
     t, =        yield [("CE", [tm, mm_ref])]
     yield [("CE", [t, m_ref])]
-    raise PrimitiveFinished(None)
-
-def __get_input(parameters):
-    mvk = parameters["mvk"]
-    user_root = parameters["user_root"]
-    while 1:
-        try:
-            gen = mvk.input_init(user_root)
-            inp = None
-            while 1:
-                inp = yield gen.send(inp)
-        except StopIteration:
-            # Finished
-            if mvk.success:
-                # Got some input, so we can access it
-                raise PrimitiveFinished(mvk.input_value)
-            else:
-                # No input, so yield None but don't stop
-                yield None
+    raise PrimitiveFinished(None)

+ 488 - 0
kernel/modelverse_kernel/compiled_legacy.py

@@ -0,0 +1,488 @@
+from modelverse_kernel.primitives import PrimitiveFinished
+
+def reverseKeyLookup(a, b, **remainder):
+    edges, = yield [("RO", [a])]
+    expanded_edges = yield [("RE", [i]) for i in edges]
+    for i, edge in enumerate(expanded_edges):
+        if b == edge[1]:
+            # Found our edge: edges[i]
+            outgoing, = yield [("RO", [edges[i]])]
+            result, = yield [("RE", [outgoing[0]])]
+            raise PrimitiveFinished(result[1])
+
+    result, = yield [("CNV", ["(unknown: %s)" % b])]
+    raise PrimitiveFinished(result)
+
+def read_attribute(a, b, c, **remainder):
+    model_dict, b_val, c_val, type_mapping = \
+                    yield [("RD", [a, "model"]),
+                           ("RV", [b]),
+                           ("RV", [c]),
+                           ("RD", [a, "type_mapping"]),
+                           ]
+    model_instance, = \
+                    yield [("RD", [model_dict, b_val])]
+    edges, =        yield [("RO", [model_instance])]
+    edge_types =    yield [("RDN", [type_mapping, i]) for i in edges]
+    type_edge_val = yield [("RE", [i]) for i in edge_types]
+
+    src_nodes = set([i[0] for i in type_edge_val])
+
+    found_edges =   yield [("RDE", [i, c_val]) for i in src_nodes]
+
+    for e1 in found_edges:
+        if e1 is not None:
+            # Found an edge!
+            for i, e2 in enumerate(edge_types):
+                if e1 == e2:
+                    # The instance of this edge is the one we want!
+                    edge = edges[i]
+                    edge_val, = yield [("RE", [edge])]
+                    result = edge_val[1]
+                    raise PrimitiveFinished(result)
+    else:
+        result, = yield [("RR", [])]
+        raise PrimitiveFinished(result)
+
+    raise Exception("Error in reading edge!")
+
+def precompute_cardinalities(a, **remainder):
+    result, =       yield [("CN", [])]
+
+    # Read out all edges from the metamodel
+    a, =             yield [("RD", [a, "metamodel"])]
+    model_dict, =    yield [("RD", [a, "model"])]
+    model_keys, =    yield [("RDK", [model_dict])]
+    type_mapping, =  yield [("RD", [a, "type_mapping"])]
+    elems  =         yield [("RDN", [model_dict, k]) for k in model_keys]
+    model_keys_str = yield [("RV", [i]) for i in model_keys]
+    elem_to_name =   dict(zip(elems, model_keys_str))
+    edges =          yield [("RE", [i]) for i in elems]
+    elems = [elems[i] for i, edge_val in enumerate(edges) if edge_val is not None]
+    # Now we have all edges in the metamodel
+
+    # Read out the type of the Association defining all cardinalities
+    metamodel, =     yield [("RD", [a, "metamodel"])]
+    metametamodel, = yield [("RD", [metamodel, "metamodel"])]
+    metametamodel_dict, = \
+                    yield [("RD", [metametamodel, "model"])]
+    assoc, =         yield [("RD", [metametamodel_dict, "Association"])]
+    slc, suc, tlc, tuc = \
+                    yield [("RDE", [assoc, "source_lower_cardinality"]),
+                           ("RDE", [assoc, "source_upper_cardinality"]),
+                           ("RDE", [assoc, "target_lower_cardinality"]),
+                           ("RDE", [assoc, "target_upper_cardinality"]),
+                          ]
+
+    # All that we now have to do is find, for each edge, whether or not it has an edge typed by any of these links!
+    # Just find all links typed by these links!
+    types =         yield [("RDN", [type_mapping, i]) for i in elems]
+
+    cardinalities = {}
+    for i, edge_type in enumerate(types):
+        if edge_type == slc:
+            t = "slc"
+        elif edge_type == suc:
+            t = "suc"
+        elif edge_type == tlc:
+            t = "tlc"
+        elif edge_type == tuc:
+            t = "tuc"
+        else:
+            continue
+        
+        # Found a link, so add it
+        srcdst, = yield [("RE", [elems[i]])]
+        source, destination = srcdst
+        # The edge gives the "source" the cardinality found in "destination"
+        cardinalities.setdefault(elem_to_name[source], {})[t] = destination
+
+    # Now we have to translate the "cardinalities" Python dictionary to a Modelverse dictionary
+    nodes = yield [("CN", []) for i in cardinalities]
+    yield [("CD", [result, i, node]) for i, node in zip(cardinalities.keys(), nodes)]
+    l = cardinalities.keys()
+    values = yield [("RD", [result, i]) for i in l]
+
+    for i, value in enumerate(values):
+        cards = cardinalities[l[i]]
+        yield [("CD", [value, card_type, cards[card_type]]) for card_type in cards]
+
+    raise PrimitiveFinished(result)
+
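+# Create a shallow copy of set 'a': a new node that gets an outgoing edge to
+# every element that 'a' currently links to.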
+def set_copy(a, **remainder):
+    b, =         yield [("CN", [])]
+    links, =     yield [("RO", [a])]
+    exp_links = yield [("RE", [i]) for i in links]
+    _ =         yield [("CE", [b, i[1]]) for i in exp_links]
+    raise PrimitiveFinished(b)
+
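+# Return a Modelverse set with the names of all elements of model 'a' whose
+# type is the metamodel class named 'b' or any of its subtypes (found by
+# following incoming Inheritance links in the metamodel).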
+def allInstances(a, b, **remainder):
+    b_val, =     yield [("RV", [b])]
+    model_dict,= yield [("RD", [a, "model"])]
+    metamodel, = yield [("RD", [a, "metamodel"])]
+    m3, =        yield [("RD", [metamodel, "metamodel"])]
+    m3_model, =  yield [("RD", [m3, "model"])]
+    mm_dict, =   yield [("RD", [metamodel, "model"])]
+    typing, =    yield [("RD", [a, "type_mapping"])]
+    elem_keys, = yield [("RDK", [model_dict])]
+    elems =     yield [("RDN", [model_dict, i]) for i in elem_keys]
+    mms =       yield [("RDN", [typing, i]) for i in elems]
+
+    # Pair each name node with the type of its element
+    types_to_name_nodes = {}
+    for key, mm in zip(elem_keys, mms):
+        types_to_name_nodes.setdefault(mm, set()).add(key)
+    # And now we have the inverse mapping: for each type, we have the node containing the name
+
+    # Get the inheritance link type
+    inheritance_type, =  yield [("RD", [m3_model, "Inheritance"])]
+
+    # Now we figure out which types are valid for the specified model
+    desired_types = set()
+    mm_element, =    yield [("RD", [mm_dict, b_val])]
+
+    work_list = []
+    work_list.append(mm_element)
+    mm_typing, =     yield [("RD", [metamodel, "type_mapping"])]
+
+    while work_list:
+        mm_element = work_list.pop()
+        if mm_element in desired_types:
+            # Already been here, so stop
+            continue
+
+        # New element, so continue
+        desired_types.add(mm_element)
+
+        # Follow all inheritance links coming INTO this node: their sources are subtypes and should match as well
+        incoming, =  yield [("RI", [mm_element])]
+        for i in incoming:
+            t, =     yield [("RDN", [mm_typing, i])]
+            if t == inheritance_type:
+                e, = yield [("RE", [i])]
+                # Add the source of the inheritance link to the work list
+                work_list.append(e[0])
+
+    # desired_types now holds every type we are interested in.
+    # Construct the result from all elements that are direct instances of one of these types.
+    final = set()
+    for t in desired_types:
+        final |= types_to_name_nodes.get(t, set())
+
+    # The result is a Python set of nodes; convert it to a Modelverse set
+    result, =    yield [("CN", [])]
+    v =         yield [("RV", [i]) for i in final]
+    _ =    yield [("CE", [result, i]) for i in final]
+    raise PrimitiveFinished(result)
+
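+# Add an action language (AL) fragment rooted at 'b' to model 'a': walk the
+# AST along the expected outgoing links of each construct, register every node
+# and edge in the model dictionary, and type them in the type mapping.
+# Returns a node holding the name ("__<root>") under which the root was added.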
+def add_AL(a, b, **remainder):
+    worklist = [(b, "funcdef")]
+    added = set()
+    type_cache = {}
+
+    model_dict, = yield [("RD", [a, "model"])]
+    metamodel, = yield [("RD", [a, "metamodel"])]
+    metamodel_dict, = yield [("RD", [metamodel, "model"])]
+    type_map, = yield [("RD", [a, "type_mapping"])]
+    outgoing, = yield [("RO", [model_dict])]
+    edges = yield [("RE", [i]) for i in outgoing]
+    added |= set([i[1] for i in edges])
+
+    result, = yield [("CNV", ["__%s" % b])]
+
+    # All the action language elements and their expected output links
+    type_links = {
+            "if":       [("cond", ""), ("then", ""), ("else", ""), ("next", "")],
+            "while":    [("cond", ""), ("body", ""), ("next", "")],
+            "assign":   [("var", ""), ("value", ""), ("next", "")],
+            "break":    [("while", "while")],
+            "continue": [("while", "while")],
+            "return":   [("value", "")],
+            "resolve":  [("var", "")],
+            "access":   [("var", "")],
+            "constant": [("node", "")],
+            "output":   [("node", ""), ("next", "")],
+            "global":   [("var", "String"), ("next", "")],
+            "param":    [("name", "String"), ("value", ""), ("next_param", "param")],
+            "funcdef":  [("body", ""), ("next", "")],
+            "call":     [("func", ""), ("params", "param"), ("last_param", "param"), ("next", "")],
+        }
+
+    # Pre-populate the type cache with frequently used types, so we do not have to look them up later
+    to_str, string = yield [("RD", [metamodel_dict, "to_str"]),
+                            ("RD", [metamodel_dict, "String"])]
+
+    type_cache = {"to_str": to_str,
+                  "String": string}
+
+    while worklist:
+        # Fetch the element and see if we need to add it
+        worknode, expected_type = worklist.pop(0)
+        if worknode in added:
+            continue
+
+        # Determine type of element
+        if expected_type == "":
+            value, = yield [("RV", [worknode])]
+            if isinstance(value, dict) and "value" in value:
+                v = value["value"]
+                if v in ["if", "while", "assign", "call", "break", "continue", "return", "resolve", "access", "constant", "global", "declare"]:
+                    expected_type = v
+                else:
+                    expected_type = "Any"
+            else:
+                expected_type = "Any"
+
+        # Fill the cache
+        if expected_type not in type_cache:
+            type_cache[expected_type], = yield [("RD", [metamodel_dict, expected_type])]
+
+        # Need to add it now
+        yield [("CD", [model_dict, "__%s" % worknode, worknode])]
+        added.add(worknode)
+        # NOTE can't just use CD here, as the key is a node and not a value
+        t1, = yield [("CE", [type_map, type_cache[expected_type]])]
+        t2, = yield [("CE", [t1, worknode])]
+        if t1 is None or t2 is None:
+            raise Exception("Failed to create type mapping edge for node %s" % worknode)
+
+        # Now add all its outgoing links, depending on the type we actually saw
+        links = type_links.get(expected_type, [])
+        for link in links:
+            link_name, destination_type = link
+
+            # Check if the link actually exists
+            destination, = yield [("RD", [worknode, link_name])]
+            if destination is not None:
+                # If so, we add it and continue
+                edge, = yield [("RDE", [worknode, link_name])]
+                edge_outlinks, = yield [("RO", [edge])]
+                edge_outlink = edge_outlinks[0]
+                edge_name, = yield [("RE", [edge_outlink])]
+                edge_name = edge_name[1]
+                # Now add: edge, edge_outlink, edge_name
+
+                # Add 'edge'
+                yield [("CD", [model_dict, "__%s" % edge, edge])]
+                added.add(edge)
+                link_type = "%s_%s" % (expected_type, link_name)
+                if link_type not in type_cache:
+                    type_cache[link_type], = yield [("RD", [metamodel_dict, link_type])]
+                t, = yield [("CE", [type_map, type_cache[link_type]])]
+                yield [("CE", [t, edge])]
+
+                # Add 'edge_outlink'
+                yield [("CD", [model_dict, "__%s" % edge_outlink, edge_outlink])]
+                added.add(edge_outlink)
+                t, = yield [("CE", [type_map, type_cache["to_str"]])]
+                yield [("CE", [t, edge_outlink])]
+
+                # Add 'edge_name' (if not present)
+                if edge_name not in added:
+                    yield [("CD", [model_dict, "__%s" % edge_name, edge_name])]
+                    t, = yield [("CE", [type_map, type_cache["String"]])]
+                    yield [("CE", [t, edge_name])]
+                    added.add(edge_name)
+
+                # Add the destination to the worklist
+                worklist.append((destination, destination_type))
+
+    raise PrimitiveFinished(result)
+
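+# Return a Modelverse set with the name nodes of class 'b' and all of its
+# transitive superclasses, found by following outgoing Inheritance links.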
+def get_superclasses(a, b, **remainder):
+    mm, =            yield [("RD", [a, "metamodel"])]
+    mm, =            yield [("RD", [mm, "metamodel"])]
+    m, =             yield [("RD", [mm, "model"])]
+    inheritance, =   yield [("RD", [m, "Inheritance"])]
+    model_dict, =    yield [("RD", [a, "model"])]
+    b_v, =           yield [("RV", [b])]
+    subclass, =      yield [("RD", [model_dict, b_v])]
+    type_mapping, =  yield [("RD", [a, "type_mapping"])]
+    names, =         yield [("RDK", [model_dict])]
+    elems =         yield [("RDN", [model_dict, i]) for i in names]
+    elem_to_name =  dict(zip(elems, names))
+
+    result, =        yield [("CN", [])]
+    worklist = [subclass]
+
+    touched = set()
+
+    while worklist:
+        subclass = worklist.pop()
+        res = elem_to_name[subclass]
+
+        if subclass not in touched:
+            touched.add(subclass)
+            yield [("CE", [result, res])]
+
+            outgoing, =      yield [("RO", [subclass])]
+            types =         yield [("RDN", [type_mapping, i]) for i in outgoing]
+
+            for i, t in enumerate(types):
+                if t == inheritance:
+                    # Found an inheritance link!
+                    elem = outgoing[i]
+                    srcdst, = yield [("RE", [elem])]
+                    src, dst = srcdst
+                    # Continue with the superclass (the destination of the link)
+                    worklist.append(dst)
+
+    raise PrimitiveFinished(result)
+
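+# From the set of association names 'c', select those associations of model
+# 'a' whose destination is class 'b' or one of its superclasses, i.e. the
+# associations that may have an instance incoming on an instance of 'b'.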
+def selectPossibleIncoming(a, b, c, **remainder):
+    model_dict, =    yield [("RD", [a, "model"])]
+    limit_set_links, = \
+                    yield [("RO", [c])]
+    limit_set =     yield [("RE", [i]) for i in limit_set_links]
+    limit_set_names = [i[1] for i in limit_set]
+    name_values =   yield [("RV", [i]) for i in limit_set_names]
+    limit_set =     yield [("RD", [model_dict, i]) for i in name_values]
+
+    try:
+        gen = get_superclasses(a, b)
+        inp = None
+        while 1:
+            inp =   yield gen.send(inp)
+    except PrimitiveFinished as e:
+        superclasses = e.result
+        vals, = yield [("RO", [superclasses])]
+        superclasses = yield [("RE", [i]) for i in vals]
+        superclasses = [i[1] for i in superclasses]
+
+    superclass_names = yield [("RV", [i]) for i in superclasses]
+    elems =         yield [("RD", [model_dict, i]) for i in superclass_names]
+
+    result, =        yield [("CN", [])]
+    for i, edge in enumerate(limit_set):
+        srcdst, =  yield [("RE", [edge])]
+        src, dst = srcdst
+        if dst in elems:
+            yield [("CE", [result, limit_set_names[i]])]
+
+    raise PrimitiveFinished(result)
+
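+# From the set of association names 'c', select those associations of model
+# 'a' whose source is class 'b' or one of its superclasses, i.e. the
+# associations that may have an instance outgoing from an instance of 'b'.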
+def selectPossibleOutgoing(a, b, c, **remainder):
+    model_dict, =    yield [("RD", [a, "model"])]
+    limit_set_links, = \
+                    yield [("RO", [c])]
+    limit_set =     yield [("RE", [i]) for i in limit_set_links]
+    limit_set_names = \
+                    [i[1] for i in limit_set]
+    name_values =   yield [("RV", [i]) for i in limit_set_names]
+    limit_set =     yield [("RD", [model_dict, i]) for i in name_values]
+
+    try:
+        gen = get_superclasses(a, b)
+        inp = None
+        while 1:
+            inp =  yield gen.send(inp)
+    except PrimitiveFinished as e:
+        superclasses = e.result
+        vals, = yield [("RO", [superclasses])]
+        superclasses = yield [("RE", [i]) for i in vals]
+        superclasses = [i[1] for i in superclasses]
+
+    superclass_names = yield [("RV", [i]) for i in superclasses]
+    elems =         yield [("RD", [model_dict, i]) for i in superclass_names]
+
+    result, =        yield [("CN", [])]
+    for i, edge in enumerate(limit_set):
+        srcdst, =  yield [("RE", [edge])]
+        src, dst = srcdst
+        if src in elems:
+            yield [("CE", [result, limit_set_names[i]])]
+
+    raise PrimitiveFinished(result)
+
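+# Check symbol consistency over the objects named in set 'c' (looked up in
+# 'a'): every symbol may be defined at most once and, apart from "input" and
+# "output", every referenced symbol (and the function named 'b') must be
+# defined somewhere. Returns "OK" or an ERROR string.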
+def check_symbols(a, b, c, **remainder):
+    symbols = {}
+    function_name, = yield [("RV", [b])]
+    symbols[function_name] = False
+    object_links, = yield [("RO", [c])]
+    set_elements = yield [("RE", [i]) for i in object_links]
+    set_elements = [i[1] for i in set_elements]
+    set_values = yield [("RV", [i]) for i in set_elements]
+    set_elements = yield [("RD", [a, i]) for i in set_values]
+    symbols_set = yield [("RD", [i, "symbols"]) for i in set_elements]
+    all_keys = yield [("RDK", [i]) for i in symbols_set]
+    for i, s in zip(all_keys, symbols_set):
+        # For each object we have found
+        keys = yield [("RV", [j]) for j in i]
+        values = yield [("RD", [s, j]) for j in keys]
+        values = yield [("RV", [j]) for j in values]
+        for key, value in zip(keys, values):
+            k = key
+            v = value
+            if v and symbols.get(k, False):
+                result, = yield [("CNV", ["ERROR: multiple definition of symbol " + str(key)])]
+                raise PrimitiveFinished(result)
+            elif v and not symbols.get(k, False):
+                symbols[k] = True
+            elif not v and k not in symbols:
+                symbols[k] = False
+
+    for i, j in symbols.items():
+        if i == "input" or i == "output":
+            continue
+        if not j:
+            result, = yield [("CNV", ["ERROR: undefined symbol " + str(i)])]
+            raise PrimitiveFinished(result)
+
+    result, = yield [("CNV", ["OK"])]
+    raise PrimitiveFinished(result)
+
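+# Construct a "constant" action language node and fill its "node" link with
+# the next available user input, polling via __get_input until one arrives.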
+def construct_const(**remainder):
+    v, = yield [("CNV", [{"value": "constant"}])]
+
+    # Get input: keep trying until we get something
+    try:
+        gen = __get_input(remainder)
+        inp = None
+        while 1:
+            inp = yield gen.send(inp)
+    except PrimitiveFinished as e:
+        inp = e.result
+
+    yield [("CD", [v, "node", inp])]
+
+    raise PrimitiveFinished(v)
+
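+# Return the name node 'b' unchanged if it holds a non-empty string;
+# otherwise generate a fresh name of the form "__<a>".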
+def instantiated_name(a, b, **remainder):
+    name_value, = yield [("RV", [b])]
+    if name_value == "":
+        b, = yield [("CNV", ["__" + str(a)])]
+    raise PrimitiveFinished(b)
+
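+# Retype the element named 'b' in model 'a' to the metamodel element named
+# 'c': delete the existing type-mapping edge (if any) and create a new one.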
+def retype(a, b, c, **remainder):
+    tm, =       yield [("RD", [a, "type_mapping"])]
+    m, =        yield [("RD", [a, "model"])]
+    mm, =       yield [("RD", [a, "metamodel"])]
+    mm_dict, =  yield [("RD", [mm, "model"])]
+    c_val, =    yield [("RV", [c])]
+    mm_ref, =   yield [("RD", [mm_dict, c_val])]
+    b_val, =    yield [("RV", [b])]
+    m_ref, =    yield [("RD", [m, b_val])]
+    prev_edge, = yield [("RDNE", [tm, m_ref])]
+    if prev_edge is not None:
+        yield [("DE", [prev_edge])]
+    t, =        yield [("CE", [tm, mm_ref])]
+    yield [("CE", [t, m_ref])]
+    raise PrimitiveFinished(None)
+
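+# Helper generator: repeatedly poll the kernel for user input via
+# mvk.input_init, yielding None while none is available, and finish with the
+# input value once it arrives.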
+def __get_input(parameters):
+    mvk = parameters["mvk"]
+    user_root = parameters["user_root"]
+    while 1:
+        try:
+            gen = mvk.input_init(user_root)
+            inp = None
+            while 1:
+                inp = yield gen.send(inp)
+        except StopIteration:
+            # Finished
+            if mvk.success:
+                # Got some input, so we can access it
+                raise PrimitiveFinished(mvk.input_value)
+            else:
+                # No input, so yield None but don't stop
+                yield None

+ 996 - 0
kernel/modelverse_kernel/legacy.py

@@ -0,0 +1,996 @@
+"""A legacy version of the Modelverse kernel. This kernel can be used as a baseline for the
+performance of newer kernels."""
+
+import modelverse_kernel.primitives as primitive_functions
+import modelverse_kernel.compiled_legacy as compiled_functions
+from collections import defaultdict
+import sys
+import time
+
+if sys.version_info >= (3, 0): # pragma: no cover
+    string_types = (str,)
+else:
+    string_types = (str, unicode)
+
+class ModelverseKernel(object):
+    def __init__(self, root):
+        self.root = root
+        self.primitives = {}
+        self.compiled = {}
+        self.returnvalue = None
+        self.success = True
+        self.generators = {}
+        self.allow_compiled = True
+        #self.allow_compiled = False
+        self.debug_info = defaultdict(list)
+
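+    # Drive one step of the generator for (username, operation): send in the
+    # caller's 'reply' (if any) and return whatever the generator yields next,
+    # typically a list of (instruction, arguments) request tuples.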
+    def execute_yields(self, username, operation, params, reply):
+        try:
+            self.success = True
+            self.username = username
+            if username not in self.generators:
+                self.generators[username] = {}
+            if operation not in self.generators[username]:
+                # Create the generator for the function to execute
+                self.generators[username][operation] = getattr(self, operation)(username, *params)
+
+            if reply is not None:
+                return self.generators[username][operation].send(reply)
+            else:
+                return self.generators[username][operation].next()
+        except StopIteration:
+            # Done, so remove the generator
+            del self.generators[username][operation]
+            return None
+        except:
+            print("Unknown error @ %s" % self.debug_info[username])
+            raise
+
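+    # Execute one step for the given user: read the frame's IP and phase and
+    # dispatch to the matching handler (a compiled primitive, an
+    # "<instruction>_<phase>" method, or call_param for parameter passing).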
+    def execute_rule(self, username):
+        user_root, =    yield [("RD", [self.root, username])]
+        if user_root is None:
+            self.success = False
+            self.returnvalue = None
+            yield None
+        else:
+            user_frame, =   yield [("RD", [user_root, "frame"])]
+            self.inst, phase =   yield [("RD", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "phase"]),
+                                  ]
+            self.new_debug, self.phase_v, inst_v = \
+                            yield [("RD", [self.inst, "__debug"]),
+                                   ("RV", [phase]),
+                                   ("RV", [self.inst]),
+                                  ]
+            if self.new_debug is not None:
+                if self.debug_info[username]:
+                    self.debug_info[username][-1], = yield [("RV", [self.new_debug])]
+
+            if self.phase_v == "finish":
+                gen = self.helper_init(user_root)
+            elif self.inst is None:
+                raise Exception("Instruction pointer could not be found!")
+            elif isinstance(self.phase_v, string_types):
+                if self.phase_v == "init" and self.inst in self.compiled:
+                    #print("%-30s(%s)" % ("COMPILED " + str(self.compiled[self.inst]), self.phase_v))
+                    gen = self.execute_primitive(user_root, self.inst, username)
+                elif inst_v is None:
+                    raise Exception("%s: error understanding command (%s, %s)" % (self.debug_info[username], inst_v, self.phase_v))
+                else:
+                    #print("%-30s(%s) -- %s" % (inst_v["value"], self.phase_v, username))
+                    gen = getattr(self, "%s_%s" % (inst_v["value"], self.phase_v))(user_root)
+            elif inst_v is None:
+                raise Exception("%s: error understanding command (%s, %s)" % (self.debug_info[username], inst_v, self.phase_v))
+            elif inst_v["value"] == "call":
+                #print("%-30s(%s)" % ("call", "param"))
+                gen = self.call_param(user_root)
+            else:
+                raise Exception("%s: error understanding command (%s, %s)" % (self.debug_info[username], inst_v, self.phase_v))
+
+            try:
+                inp = None
+                while 1:
+                    inp = yield gen.send(inp)
+            except StopIteration:
+                pass
+
+    ##########################
+    ### Process primitives ###
+    ##########################
+    def load_primitives(self, username):
+        hierarchy, =     yield [("RD", [self.root, "__hierarchy"])]
+        primitives, =    yield [("RD", [hierarchy, "primitives"])]
+        keys, =          yield [("RDK", [primitives])]
+        function_names = yield [("RV", [f]) for f in keys]
+        signatures  =    yield [("RDN", [primitives, f]) for f in keys]
+        bodies =         yield [("RD", [f, "body"]) for f in signatures]
+        for i in range(len(keys)):
+            self.primitives[bodies[i]] = getattr(primitive_functions, function_names[i])
+        self.compiled.update(self.primitives)
+
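+    # Run the compiled/primitive implementation bound to 'inst': gather the
+    # frame's symbols as keyword parameters, drive the primitive generator
+    # until PrimitiveFinished, then pop the frame and store the result as the
+    # caller's returnvalue.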
+    def execute_primitive(self, user_root, inst, username):
+        # execute_primitive
+        user_frame, =    yield [("RD", [user_root, "frame"])]
+        symbols, =       yield [("RD", [user_frame, "symbols"])]
+        all_links, =     yield [("RO", [symbols])]
+        containers =    yield [("RE", [v]) for v in all_links]
+        outgoings =     yield [("RO", [v]) for v in all_links]
+        dict_values =   yield [("RD", [v[1], "value"]) for v in containers]
+        formals_1 =     yield [("RE", [v[0]]) for v in outgoings]
+        dict_keys_ref = yield [("RD", [v[1], "name"]) for v in formals_1]
+        dict_keys =     yield [("RV", [v]) for v in dict_keys_ref]
+        parameters = dict(zip(dict_keys, dict_values))
+
+        parameters["root"] = self.root
+        parameters["user_root"] = user_root
+        parameters["username"] = username
+        parameters["mvk"] = self
+
+        # prim is a generator itself!
+        try:
+            # Forward every reply we receive to this generator.
+            # The primitive might not even be a generator, in which case calling it raises PrimitiveFinished right away (e.g., for the Read Root operation) and we jump straight to the except block.
+            prim = self.compiled[inst](**parameters)
+            inp = None
+            while 1:
+                inp = yield prim.send(inp)
+        except StopIteration:
+            # Execution ended without a return value, which a primitive must never do
+            raise Exception("%s: primitive finished without returning a value!" % (self.debug_info[username]))
+        except primitive_functions.PrimitiveFinished as e:
+            # Execution ended with a return value; read it from the raised exception
+            result = e.result
+
+            #if result is None:
+            #    raise Exception("Primitive raised exception: value of None for operation %s with parameters %s" % (self.compiled[inst], str(parameters)))
+
+        # Clean up the current stack, as if a return happened
+        old_frame, =    yield [("RD", [user_frame, "prev"])]
+        lnk, =          yield [("RDE", [old_frame, "returnvalue"])]
+        _, _, _, _ =    yield [("CD", [old_frame, "returnvalue", result]),
+                               ("CD", [user_root, "frame", old_frame]),
+                               ("DE", [lnk]),
+                               ("DN", [user_frame]),
+                              ]
+        if self.debug_info[self.username]:
+            self.debug_info[self.username].pop()
+
+    ########################################
+    ### Execute input and output methods ###
+    ########################################
+    def get_output(self, username):
+        user_root, =        yield [("RD", [self.root, username])]
+        first_output, =     yield [("RD", [user_root, "output"])]
+        next_output, rv =   yield [("RD", [first_output, "next"]),
+                                   ("RD", [first_output, "value"]),
+                                  ]
+        if next_output is None:
+            self.success = False
+            self.returnvalue = None
+        else:
+            rv_value, =     yield [("RV", [rv])]
+            _, _ =          yield [("CD", [user_root, "output", next_output]),
+                                   ("DN", [first_output]),
+                                  ]
+            self.returnvalue = rv_value
+
+    def set_input(self, username, value):
+        user_root, =        yield [("RD", [self.root, username])]
+        old_input, link =   yield [("RD", [user_root, "last_input"]),
+                                   ("RDE", [user_root, "last_input"]),
+                                  ]
+        new_input, =        yield [("CN", [])]
+        _, _ =              yield [("CD", [user_root, "last_input", new_input]),
+                                   ("CD", [old_input, "next", new_input]),
+                                  ]
+
+        new_value, =        yield [("CNV", [value])]
+        _, _ =              yield [("CD", [old_input, "value", new_value]),
+                                   ("DE", [link])
+                                  ]
+        self.returnvalue = {"id": 100, "value": "success"}
+
+    #############################################
+    ### Transformation rules for instructions ###
+    #############################################
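+    # Every instruction is executed in phases ("init", "cond", "eval", ...,
+    # "finish"); each "<instruction>_<phase>" method below performs one such
+    # phase by rewriting the user's frame (IP, phase and evalstack).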
+    def break_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        phase_link, ip_link = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RDE", [user_frame, "IP"])
+                                  ]
+        inst, =             yield [("RD", [user_frame, "IP"])]
+        while_inst, new_phase = \
+                            yield [("RD", [inst, "while"]),
+                                   ("CNV", ["finish"]),
+                                  ]
+        _, _, _, _ =        yield [("CD", [user_frame, "phase", new_phase]),
+                                   ("CD", [user_frame, "IP", while_inst]),
+                                   ("DE", [phase_link]),
+                                   ("DE", [ip_link]),
+                                  ]
+
+    def continue_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        ip_link, inst =     yield [("RDE", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "IP"]),
+                                  ]
+        while_inst, =       yield [("RD", [inst, "while"])]
+        _, _ =              yield [("CD", [user_frame, "IP", while_inst]),
+                                   ("DE", [ip_link]),
+                                  ]
+
+    def if_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        evalstack, evalstack_link = \
+                            yield [("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                  ]
+        inst, ip_link =     yield [("RD", [user_frame, "IP"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                  ]
+        cond, =             yield [("RD", [inst, "cond"])]
+        new_evalstack, new_phase = \
+                            yield [("CN", []),
+                                   ("CNV", ["cond"]),
+                                  ]
+        _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [user_frame, "IP", cond]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", new_phase]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                  ]
+
+    def if_cond(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        returnvalue, inst = yield [("RD", [user_frame, "returnvalue"]),
+                                   ("RD", [user_frame, "IP"]),
+                                  ]
+        returnvalue_v, =    yield [("RV", [returnvalue])]
+        _else, =            yield [("RD", [inst, "else"])]
+
+        if returnvalue_v:
+            phase_link, evalstack, evalstack_link, ip_link, _then, new_evalstack, evalstack_phase, new_phase = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                   ("RD", [inst, "then"]),
+                                   ("CN", []),
+                                   ("CNV", ["finish"]),
+                                   ("CNV", ["init"]),
+                                  ]
+            _, _, _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [user_frame, "IP", _then]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", evalstack_phase]),
+                                   ("CD", [user_frame, "phase", new_phase]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                   ("DE", [phase_link]),
+                                  ]
+        elif _else is None:
+            phase_link, new_phase = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("CNV", ["finish"]),
+                                  ]
+            _, _ =          yield [("CD", [user_frame, "phase", new_phase]),
+                                   ("DE", [phase_link]),
+                                  ]
+        else:
+            phase_link, evalstack, evalstack_link, ip_link = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                  ]
+            new_evalstack, new_phase, evalstack_phase = \
+                            yield [("CN", []),
+                                   ("CNV", ["init"]),
+                                   ("CNV", ["finish"]),
+                                  ]
+            _, _, _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [user_frame, "IP", _else]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", evalstack_phase]),
+                                   ("CD", [user_frame, "phase", new_phase]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                   ("DE", [phase_link]),
+                                  ]
+
+    def while_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        evalstack, evalstack_link, ip_link, inst = \
+                            yield [("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "IP"]),
+                                  ]
+        cond, new_evalstack, new_phase = \
+                            yield [("RD", [inst, "cond"]),
+                                   ("CN", []),
+                                   ("CNV", ["cond"]),
+                                  ]
+        _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [user_frame, "IP", cond]),
+                                   ("CD", [evalstack, "phase", new_phase]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                  ]
+
+    def while_cond(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        returnvalue, =      yield [("RD", [user_frame, "returnvalue"])]
+        returnvalue_v, =    yield [("RV", [returnvalue])]
+
+        if returnvalue_v:
+            phase_link, evalstack, evalstack_link, ip_link, inst = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "IP"]),
+                                  ]
+            body, =         yield [("RD", [inst, "body"])]
+            new_evalstack, new_phase, evalstack_phase = \
+                            yield [("CN", []),
+                                   ("CNV", ["init"]),
+                                   ("CNV", ["init"]),
+                                  ]
+            _, _, _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "IP", body]),
+                                   ("CD", [user_frame, "phase", new_phase]),
+                                   ("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", evalstack_phase]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                   ("DE", [phase_link]),
+                                  ]
+        else:
+            phase_link, new_phase = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("CNV", ["finish"]),
+                                  ]
+            _, _ =          yield [("CD", [user_frame, "phase", new_phase]),
+                                   ("DE", [phase_link])
+                                  ]
+
+    def access_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        evalstack, evalstack_link, inst, ip_link = \
+                            yield [("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RD", [user_frame, "IP"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                  ]
+        var, new_evalstack, new_phase = \
+                            yield [("RD", [inst, "var"]),
+                                   ("CN", []),
+                                   ("CNV", ["eval"]),
+                                  ]
+        _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "IP", var]),
+                                   ("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", new_phase]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                  ]
+
+    def access_eval(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        phase_link, returnvalue_link, returnvalue = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RDE", [user_frame, "returnvalue"]),
+                                   ("RD", [user_frame, "returnvalue"]),
+                                  ]
+        value, new_phase =  yield [("RD", [returnvalue, "value"]),
+                                   ("CNV", ["finish"]),
+                                  ]
+        _, _, _, _ =        yield [("CD", [user_frame, "phase", new_phase]),
+                                   ("CD", [user_frame, "returnvalue", value]),
+                                   ("DE", [phase_link]),
+                                   ("DE", [returnvalue_link]),
+                                  ]
+
+    def resolve_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        symbols, evalstack, evalstack_link, ip_link, inst = \
+                            yield [("RD", [user_frame, "symbols"]),
+                                   ("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "IP"]),
+                                  ]
+        var, =              yield [("RD", [inst, "var"])]
+        variable, =         yield [("RDN", [symbols, var])]
+
+        if variable is None:
+            phase_link, returnvalue_link, _globals, var_name = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RDE", [user_frame, "returnvalue"]),
+                                   ("RD", [user_root, "globals"]),
+                                   ("RV", [var]),
+                                  ]
+            variable, new_phase = \
+                            yield [("RD", [_globals, var_name]),
+                                   ("CNV", ["finish"]),
+                                  ]
+            if variable is None:
+                raise Exception("Not found as global: %s" % var_name)
+
+            # Resolved a global, so this is a string.
+            # It might name a function that was precompiled, so check whether a
+            # compiled implementation exists and bind it to the body if so.
+            if self.allow_compiled:
+                compiled_function = getattr(compiled_functions, var_name, None)
+                if compiled_function is not None:
+                    # We have a compiled function ready!
+                    # Now we have to bind the ID to the compiled functions
+                    # For this, we read out the body of the resolved data
+                    compiler_val, =  yield [("RD", [variable, "value"])]
+                    compiler_body, = yield [("RD", [compiler_val, "body"])]
+                    self.compiled[compiler_body] = compiled_function
+
+        else:
+            phase_link, returnvalue_link, new_phase = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RDE", [user_frame, "returnvalue"]),
+                                   ("CNV", ["finish"]),
+                                  ]
+        _, _, _, _ =        yield [("CD", [user_frame, "phase", new_phase]),
+                                   ("CD", [user_frame, "returnvalue", variable]),
+                                   ("DE", [phase_link]),
+                                   ("DE", [returnvalue_link]),
+                                  ]
+
+    def assign_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        evalstack, evalstack_link, ip_link, inst = \
+                            yield [("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "IP"]),
+                                  ]
+        var, new_evalstack, new_phase = \
+                            yield [("RD", [inst, "var"]),
+                                   ("CN", []),
+                                   ("CNV", ["value"]),
+                                  ]
+        _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "IP", var]),
+                                   ("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", new_phase]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                  ]
+
+    def assign_value(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        phase_link, evalstack, returnvalue, evalstack_link, ip_link, inst = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RD", [user_frame, "evalstack"]),
+                                   ("RD", [user_frame, "returnvalue"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "IP"]),
+                                  ]
+        value, new_evalstack, new_phase, evalstack_phase = \
+                            yield [("RD", [inst, "value"]),
+                                   ("CN", []),
+                                   ("CNV", ["init"]),
+                                   ("CNV", ["assign"]),
+                                  ]
+
+        _, _, _, _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "variable", returnvalue]),
+                                   ("CD", [user_frame, "phase", new_phase]),
+                                   ("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", evalstack_phase]),
+                                   ("CD", [user_frame, "IP", value]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [phase_link]),
+                                   ("DE", [ip_link]),
+                                  ]
+
+    def assign_assign(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        phase_link, returnvalue, variable_link, variable = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RD", [user_frame, "returnvalue"]),
+                                   ("RDE", [user_frame, "variable"]),
+                                   ("RD", [user_frame, "variable"]),
+                                  ]
+        value_link, new_phase = \
+                            yield [("RDE", [variable, "value"]),
+                                   ("CNV", ["finish"]),
+                                  ]
+        _, _, _, _, _ =     yield [("CD", [user_frame, "phase", new_phase]),
+                                   ("CD", [variable, "value", returnvalue]),
+                                   ("DE", [variable_link]),
+                                   ("DE", [value_link]),
+                                   ("DE", [phase_link]),
+                                  ]
+
+    def return_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        inst, =             yield [("RD", [user_frame, "IP"])]
+        value, =            yield [("RD", [inst, "value"])]
+
+        if value is None:
+            prev_frame, =   yield [("RD", [user_frame, "prev"])]
+            if prev_frame is None:
+                _, =            yield [("DN", [user_root])]
+                del self.debug_info[self.username]
+            else:
+                if self.debug_info[self.username]:
+                    self.debug_info[self.username].pop()
+                _, _ =          yield [("CD", [user_root, "frame", prev_frame]),
+                                       ("DN", [user_frame]),
+                                      ]
+        else:
+            evalstack, evalstack_link, ip_link, new_evalstack, evalstack_phase = \
+                            yield [("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                   ("CN", []),
+                                   ("CNV", ["eval"]),
+                                  ]
+            _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", evalstack_phase]),
+                                   ("CD", [user_frame, "IP", value]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                  ]
+
+    def return_eval(self, user_root):
+        if self.debug_info[self.username]:
+            self.debug_info[self.username].pop()
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        prev_frame, =       yield [("RD", [user_frame, "prev"])]
+        returnvalue, old_returnvalue_link = \
+                            yield [("RD", [user_frame, "returnvalue"]),
+                                   ("RDE", [prev_frame, "returnvalue"]),
+                                  ]
+        _, _, _, _ =        yield [("CD", [user_root, "frame", prev_frame]),
+                                   ("CD", [prev_frame, "returnvalue", returnvalue]),
+                                   ("DE", [old_returnvalue_link]),
+                                   ("DN", [user_frame]),
+                                  ]
+
+    def constant_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        phase_link, returnvalue_link, inst = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RDE", [user_frame, "returnvalue"]),
+                                   ("RD", [user_frame, "IP"]),
+                                  ]
+        node, new_phase =   yield [("RD", [inst, "node"]),
+                                   ("CNV", ["finish"]),
+                                  ]
+        _, _, _, _ =        yield [("CD", [user_frame, "phase", new_phase]),
+                                   ("CD", [user_frame, "returnvalue", node]),
+                                   ("DE", [returnvalue_link]),
+                                   ("DE", [phase_link]),
+                                  ]
+
+    def helper_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        inst, =             yield [("RD", [user_frame, "IP"])]
+        next, =             yield [("RD", [inst, "next"])]
+
+        if next is None:
+            ip_link, phase_link, evalstack_top = \
+                            yield [("RDE", [user_frame, "IP"]),
+                                   ("RDE", [user_frame, "phase"]),
+                                   ("RD", [user_frame, "evalstack"]),
+                                  ]
+            evalstack, =    yield [("RD", [evalstack_top, "prev"])]
+            evalstack_inst, evalstack_phase, evalstack_inst_link, evalstack_phase_link = \
+                            yield [("RD", [evalstack, "inst"]),
+                                   ("RD", [evalstack, "phase"]),
+                                   ("RDE", [evalstack, "inst"]),
+                                   ("RDE", [evalstack, "phase"]),
+                                  ]
+            _, _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "evalstack", evalstack]),
+                                   ("CD", [user_frame, "IP", evalstack_inst]),
+                                   ("CD", [user_frame, "phase", evalstack_phase]),
+                                   ("DE", [ip_link]),
+                                   ("DE", [phase_link]),
+                                   ("DE", [evalstack_inst_link]),
+                                   ("DE", [evalstack_phase_link]),
+                                   ("DN", [evalstack_top]),
+                                  ]
+        else:
+            ip_link, phase_link, new_phase = \
+                            yield [("RDE", [user_frame, "IP"]),
+                                   ("RDE", [user_frame, "phase"]),
+                                   ("CNV", ["init"]),
+                                  ]
+            _, _, _, _ =    yield [("CD", [user_frame, "IP", next]),
+                                   ("CD", [user_frame, "phase", new_phase]),
+                                   ("DE", [ip_link]),
+                                   ("DE", [phase_link]),
+                                  ]
+
+    def call_init(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        symbols, evalstack, evalstack_link, ip_link, inst = \
+                            yield [("RD", [user_frame, "symbols"]),
+                                   ("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "IP"]),
+                                  ]
+        func, params =      yield [("RD", [inst, "func"]),
+                                   ("RD", [inst, "params"]),
+                                  ]
+
+        if params is None:
+            new_evalstack, evalstack_phase = \
+                            yield [("CN", []),
+                                   ("CNV", ["call"]),
+                                  ]
+            _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", evalstack_phase]),
+                                   ("CD", [user_frame, "IP", func]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                  ]
+        else:
+            new_evalstack,= yield [("CN", [])]
+            _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [evalstack, "phase", params]),
+                                   ("CD", [user_frame, "IP", func]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                  ]
+
+    def call_call(self, user_root):
+        self.debug_info[self.username].append("None")
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        inst, =             yield [("RD", [user_frame, "IP"])]
+        param, =            yield [("RD", [inst, "last_param"])]
+
+        if param is None:
+            returnvalue, =  yield [("RD", [user_frame, "returnvalue"])]
+            body, phase_link, frame_link, prev_phase, new_phase, new_frame, new_evalstack, new_symbols, new_returnvalue = \
+                            yield [("RD", [returnvalue, "body"]),
+                                   ("RDE", [user_frame, "phase"]),
+                                   ("RDE", [user_root, "frame"]),
+                                   ("CNV", ["finish"]),
+                                   ("CNV", ["init"]),
+                                   ("CN", []),
+                                   ("CN", []),
+                                   ("CN", []),
+                                   ("CN", []),
+                                  ]
+            _, _, _, _, _, _, _, _, _, _, _ = \
+                            yield [("CD", [user_root, "frame", new_frame]),
+                                   ("CD", [new_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_frame, "symbols", new_symbols]),
+                                   ("CD", [new_frame, "returnvalue", new_returnvalue]),
+                                   ("CD", [new_frame, "caller", inst]),
+                                   ("CD", [new_frame, "phase", new_phase]),
+                                   ("CD", [new_frame, "IP", body]),
+                                   ("CD", [new_frame, "prev", user_frame]),
+                                   ("CD", [user_frame, "phase", prev_phase]),
+                                   ("DE", [phase_link]),
+                                   ("DE", [frame_link]),
+                                  ]
+        else:
+            newer_frames, invoking_frames = \
+                            yield [("RRD", [user_frame, "prev"]),
+                                   ("RRD", [inst, "caller"]),
+                                  ]
+            new_frame = self.find_overlapping(newer_frames, invoking_frames)
+            phase_link, frame_link, new_symbols, new_IP = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RDE", [user_root, "frame"]),
+                                   ("RD", [new_frame, "symbols"]),
+                                   ("RD", [new_frame, "IP"]),
+                                  ]
+            signature, =    yield [("RRD", [new_IP, "body"])]
+            signature = signature[0]
+            sig_params, last_param = \
+                            yield [("RD", [signature, "params"]),
+                                   ("RD", [inst, "last_param"]),
+                                  ]
+            name, =         yield [("RD", [last_param, "name"])]
+            name_value, =   yield [("RV", [name])]
+            returnvalue, formal_parameter, new_phase, variable = \
+                            yield [("RD", [user_frame, "returnvalue"]),
+                                   ("RD", [sig_params, name_value]),
+                                   ("CNV", ["finish"]),
+                                   ("CN", []),
+                                  ]
+            _, _, _, t1 =   yield [("CD", [user_root, "frame", new_frame]),
+                                   ("CD", [user_frame, "phase", new_phase]),
+                                   ("CD", [variable, "value", returnvalue]),
+                                   ("CE", [new_symbols, variable]),
+                                  ]
+            _, _, _ =       yield [("CE", [t1, formal_parameter]),
+                                   ("DE", [frame_link]),
+                                   ("DE", [phase_link]),
+                                  ]
+
+    def find_overlapping(self, a, b):
+        newer_frames = set(a)
+        invoking_frames = set(b)
+        matches = list(newer_frames.intersection(invoking_frames))
+        if len(matches) == 1:
+            return matches[0]
+        elif len(matches) > 1:
+            raise Exception("Error: multiple overlapping elements")
+        else:
+            raise Exception("Error: could not find any overlap")
+
+    def call_param(self, user_root):
+        user_frame, =       yield [("RD", [user_root, "frame"])]
+        inst, phase =       yield [("RD", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "phase"]),
+                                  ]
+        params, last_param = \
+                            yield [("RD", [inst, "params"]),
+                                   ("RD", [inst, "last_param"]),
+                                  ]
+        next_param, =       yield [("RD", [params, "next_param"])]
+
+        if params == phase:
+            phase_link, ip_link, returnvalue, param_value, evalstack, evalstack_link = \
+                            yield [("RDE", [user_frame, "phase"]),
+                                   ("RDE", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "returnvalue"]),
+                                   ("RD", [params, "value"]),
+                                   ("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                  ]
+            body, =         yield [("RD", [returnvalue, "body"])]
+            new_frame, prev_evalstack, new_phase, prev_phase, new_evalstack, new_symbols, new_returnvalue = \
+                            yield [("CN", []),
+                                   ("CN", []),
+                                   ("CNV", ["init"]),
+                                   ("CNV", ["init"]),
+                                   ("CN", []),
+                                   ("CN", []),
+                                   ("CN", []),
+                                  ]
+            _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ = \
+                            yield [("CD", [new_frame, "evalstack", new_evalstack]),
+                                   ("CD", [new_frame, "symbols", new_symbols]),
+                                   ("CD", [new_frame, "returnvalue", new_returnvalue]),
+                                   ("CD", [new_frame, "caller", inst]),
+                                   ("CD", [new_frame, "phase", new_phase]),
+                                   ("CD", [new_frame, "IP", body]),
+                                   ("CD", [new_frame, "prev", user_frame]),
+                                   ("CD", [user_frame, "phase", prev_phase]),
+                                   ("CD", [user_frame, "IP", param_value]),
+                                   ("CD", [prev_evalstack, "prev", evalstack]),
+                                   ("CD", [evalstack, "inst", inst]),
+                                   ("CD", [user_frame, "evalstack", prev_evalstack]),
+                                   ("DE", [evalstack_link]),
+                                   ("DE", [ip_link]),
+                                   ("DE", [phase_link]),
+                                  ]
+            if next_param is not None:
+                _ =         yield [("CD", [evalstack, "phase", next_param])]
+            else:
+                evalstack_phase, = \
+                            yield [("CNV", ["call"])]
+                _ =         yield [("CD", [evalstack, "phase", evalstack_phase])]
+        else:
+            frame_link, phase_link, newer_frames, invoking_frames = \
+                            yield [("RDE", [user_root, "frame"]),
+                                   ("RDE", [user_frame, "phase"]),
+                                   ("RRD", [user_frame, "prev"]),
+                                   ("RRD", [inst, "caller"]),
+                                  ]
+            new_frame = self.find_overlapping(newer_frames, invoking_frames)
+            ip_link, evalstack, evalstack_link, new_symbols, new_IP = \
+                            yield [("RDE", [user_frame, "IP"]),
+                                   ("RD", [user_frame, "evalstack"]),
+                                   ("RDE", [user_frame, "evalstack"]),
+                                   ("RD", [new_frame, "symbols"]),
+                                   ("RD", [new_frame, "IP"]),
+                                  ]
+            signature, =    yield [("RRD", [new_IP, "body"])]
+            signature = signature[0]
+            sig_params, =   yield [("RD", [signature, "params"])]
+
+            if last_param == phase:
+                prev_param, = \
+                            yield [("RRD", [last_param, "next_param"])]
+                prev_param = prev_param[0]
+                name, =     yield [("RD", [prev_param, "name"])]
+                name_value, = \
+                            yield [("RV", [name])]
+                evalstack_phase, = \
+                            yield [("CNV", ["call"])]
+                _ =         yield [("CD", [evalstack, "phase", evalstack_phase])]
+                formal_parameter, param_value = \
+                            yield [("RD", [sig_params, name_value]),
+                                   ("RD", [last_param, "value"]),
+                                  ]
+            else:
+                param_b, =  yield [("RD", [user_frame, "phase"])]
+                param_c, param_a = \
+                            yield [("RD", [param_b, "next_param"]),
+                                   ("RRD", [param_b, "next_param"]),
+                                  ]
+                param_a = param_a[0]
+                name, param_value = \
+                            yield [("RD", [param_a, "name"]),
+                                   ("RD", [param_b, "value"]),
+                                  ]
+                name_value, = \
+                            yield [("RV", [name])]
+                formal_parameter, _ = \
+                            yield [("RD", [sig_params, name_value]),
+                                   ("CD", [evalstack, "phase", param_c]),
+                                  ]
+
+            new_phase, new_evalstack, variable, returnvalue = \
+                        yield [("CNV", ["init"]),
+                               ("CN", []),
+                               ("CN", []),
+                               ("RD", [user_frame, "returnvalue"]),
+                              ]
+            _, _, _, _, _, _ = \
+                        yield [("CD", [user_frame, "evalstack", new_evalstack]),
+                               ("CD", [new_evalstack, "prev", evalstack]),
+                               ("CD", [evalstack, "inst", inst]),
+                               ("CD", [user_frame, "phase", new_phase]),
+                               ("CD", [user_frame, "IP", param_value]),
+                               ("CD", [variable, "value", returnvalue]),
+                              ]
+
+            t1, =       yield [("CE", [new_symbols, variable])]
+            _, _, _, _ = \
+                        yield [("CE", [t1, formal_parameter]),
+                               ("DE", [phase_link]),
+                               ("DE", [ip_link]),
+                               ("DE", [evalstack_link]),
+                              ]
+
+    def input_init(self, user_root):
+        user_frame, =   yield [("RD", [user_root, "frame"])]
+        returnvalue_link, _input = \
+                        yield [("RDE", [user_frame, "returnvalue"]),
+                               ("RD", [user_root, "input"]),
+                              ]
+        value, next, phase_link = \
+                        yield [("RD", [_input, "value"]),
+                               ("RD", [_input, "next"]),
+                               ("RDE", [user_frame, "phase"]),
+                              ]
+
+        if value is not None:
+            v =         yield [("RV", [value])]
+            _, _, finish = \
+                        yield [("CD", [user_frame, "returnvalue", value]),
+                               ("CD", [user_root, "input", next]),
+                               ("CNV", ["finish"]),
+                              ]
+            _, _, _, _ = \
+                        yield [("CD", [user_frame, "phase", finish]),
+                               ("DN", [_input]),
+                               ("DE", [returnvalue_link]),
+                               ("DE", [phase_link]),
+                              ]
+            self.input_value = value
+        else:
+            # No input yet, so just wait and don't advance IP or phase
+            self.input_value = None
+            self.success = False
+
+    def output_init(self, user_root):
+        user_frame, =   yield [("RD", [user_root, "frame"])]
+        evalstack, evalstack_link, ip_link, inst = \
+                        yield [("RD", [user_frame, "evalstack"]),
+                               ("RDE", [user_frame, "evalstack"]),
+                               ("RDE", [user_frame, "IP"]),
+                               ("RD", [user_frame, "IP"]),
+                              ]
+        value, new_evalstack, evalstack_phase = \
+                        yield [("RD", [inst, "value"]),
+                               ("CN", []),
+                               ("CNV", ["output"]),
+                              ]
+        _, _, _, _, _, _, _ = \
+                        yield [("CD", [user_frame, "evalstack", new_evalstack]),
+                               ("CD", [new_evalstack, "prev", evalstack]),
+                               ("CD", [evalstack, "inst", inst]),
+                               ("CD", [evalstack, "phase", evalstack_phase]),
+                               ("CD", [user_frame, "IP", value]),
+                               ("DE", [evalstack_link]),
+                               ("DE", [ip_link]),
+                              ]
+
+    def output_output(self, user_root):
+        user_frame, =   yield [("RD", [user_root, "frame"])]
+        returnvalue_link, returnvalue, last_output, phase_link, last_output_link, new_last_output, finish = \
+                        yield [("RDE", [user_frame, "returnvalue"]),
+                               ("RD", [user_frame, "returnvalue"]),
+                               ("RD", [user_root, "last_output"]),
+                               ("RDE", [user_frame, "phase"]),
+                               ("RDE", [user_root, "last_output"]),
+                               ("CN", []),
+                               ("CNV", ["finish"]),
+                              ]
+        _, _, _, _, _, _ = \
+                        yield [("CD", [last_output, "value", returnvalue]),
+                               ("CD", [last_output, "next", new_last_output]),
+                               ("CD", [user_root, "last_output", new_last_output]),
+                               ("CD", [user_frame, "phase", finish]),
+                               ("DE", [last_output_link]),
+                               ("DE", [phase_link]),
+                              ]
+
+    def declare_init(self, user_root):
+        user_frame, =   yield [("RD", [user_root, "frame"])]
+        inst, =         yield [("RD", [user_frame, "IP"])]
+        new_var, symbols, phase_link, empty_node, new_phase = \
+                        yield [("RD", [inst, "var"]),
+                               ("RD", [user_frame, "symbols"]),
+                               ("RDE", [user_frame, "phase"]),
+                               ("CN", []),
+                               ("CNV", ["finish"]),
+                              ]
+
+        exists, =       yield [("RDN", [symbols, new_var])]
+        if exists is None:
+            new_edge, = yield [("CE", [symbols, empty_node])]
+            _ =         yield [("CE", [new_edge, new_var])]
+
+        _, _ =          yield [("CD", [user_frame, "phase", new_phase]),
+                               ("DE", [phase_link]),
+                              ]
+
+    def global_init(self, user_root):
+        user_frame, =   yield [("RD", [user_root, "frame"])]
+        inst, =         yield [("RD", [user_frame, "IP"])]
+        new_var, global_symbols, phase_link, empty_node, new_phase = \
+                        yield [("RD", [inst, "var"]),
+                               ("RD", [user_root, "globals"]),
+                               ("RDE", [user_frame, "phase"]),
+                               ("CN", []),
+                               ("CNV", ["finish"]),
+                              ]
+
+        value, =        yield [("RV", [new_var])]
+        exists, =       yield [("RD", [global_symbols, value])]
+
+        if exists is None:
+            yield [("CD", [global_symbols, value, empty_node])]
+
+        _, _ =          yield [("CD", [user_frame, "phase", new_phase]),
+                               ("DE", [phase_link])
+                              ]
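The handlers above are generators: every `yield` hands the kernel a batch of ("OP", [args]) state requests and resumes with a list of replies in the same order. A minimal sketch of that request/reply loop against a made-up dict-backed state follows; the handler, the driver, and the storage layout are assumptions for illustration, not the real Modelverse state API.

    # Illustrative sketch only: a toy handler and driver for the yield-based
    # request/reply protocol. The dict-backed "state" is a stand-in.

    def square_and_store(root):
        """Reads root["value"], squares it, and stores it under root["result"]."""
        value_node, = yield [("RD", [root, "value"])]      # read dict entry
        value, = yield [("RV", [value_node])]              # read the node's value
        squared, = yield [("CNV", [value * value])]        # create a node with a value
        yield [("CD", [root, "result", squared])]          # create a dict edge

    def handle(state, op, args):
        """Answers a single toy state request."""
        if op == "RD":                        # read dict entry
            node, key = args
            return state[node].get(key)
        if op == "RV":                        # read a node's value
            return state[args[0]]["__value"]
        if op == "CNV":                       # create node with value
            new_id = len(state)
            state[new_id] = {"__value": args[0]}
            return new_id
        if op == "CD":                        # create dict edge
            node, key, target = args
            state[node][key] = target

    def drive(gen, state):
        """Feeds replies back into the generator until it is exhausted."""
        replies = None
        try:
            while True:
                requests = gen.send(replies)
                replies = [handle(state, op, args) for op, args in requests]
        except StopIteration:
            pass

    state = {0: {"value": 1}, 1: {"__value": 6}}
    drive(square_and_store(0), state)
    assert state[state[0]["result"]]["__value"] == 36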

+ 183 - 78
kernel/modelverse_kernel/main.py

@@ -1,5 +1,9 @@
 import modelverse_kernel.primitives as primitive_functions
 import modelverse_kernel.compiled as compiled_functions
+from modelverse_kernel.request_handler import RequestHandler
+import modelverse_jit.jit as jit
+import modelverse_jit.intrinsics as jit_intrinsics
+import modelverse_jit.jit_primitives as jit_primitives
 from collections import defaultdict
 import sys
 import time
@@ -12,35 +16,71 @@ else:
 class ModelverseKernel(object):
     def __init__(self, root):
         self.root = root
-        self.primitives = {}
-        self.compiled = {}
         self.returnvalue = None
         self.success = True
-        self.generators = {}
+        # request_handlers is a dictionary of usernames to dictionaries of operations
+        # to request handlers. In generics notation:
+        #
+        # Dictionary<
+        #     Username,
+        #     Dictionary<
+        #         Operation,
+        #         RequestHandler>>
+        #
+        self.request_handlers = {}
         self.allow_compiled = True
         #self.allow_compiled = False
+
+        # suggested_function_names maps body ids to suggested function names.
+        # You can tell the kernel to stop caring about getting the function names
+        # right by setting this to `None`.
+        self.suggested_function_names = {}
+
+        # `self.jit` handles most JIT-related functionality.
+        self.jit = jit.ModelverseJit()
+        if self.allow_compiled:
+            self.jit.compiled_function_lookup = lambda func_name: \
+                getattr(compiled_functions, func_name, None)
+
+        jit_intrinsics.register_intrinsics(self.jit)
+
+        # To disable the JIT, uncomment the line below:
+        #
+        #     self.jit.set_jit_enabled(False)
+        #
+        # To disable direct calls in the JIT, uncomment the line below:
+        #
+        #     self.jit.allow_direct_calls(False)
+        #
+        # To enable tracing in the JIT (for debugging purposes), uncomment
+        # the line below:
+        #
+        #     self.jit.enable_tracing()
+        #
+        # To make the JIT compile 'input' instructions as calls to
+        # modelverse_jit.runtime.get_input, uncomment the line below:
+        #
+        #     self.jit.use_input_function()
+        #
+
         self.debug_info = defaultdict(list)
 
     def execute_yields(self, username, operation, params, reply):
         try:
             self.success = True
             self.username = username
-            if username not in self.generators:
-                self.generators[username] = {}
-            if operation not in self.generators[username]:
+            if username not in self.request_handlers:
+                self.request_handlers[username] = {}
+            if operation not in self.request_handlers[username]:
                 # Create the generator for the function to execute
-                self.generators[username][operation] = getattr(self, operation)(username, *params)
+                self.request_handlers[username][operation] = RequestHandler()
+            handler = self.request_handlers[username][operation]
+            if not handler.is_active():
+                handler.push_generator(getattr(self, operation)(username, *params))
 
-            if reply is not None:
-                return self.generators[username][operation].send(reply)
-            else:
-                return self.generators[username][operation].next()
-        except StopIteration:
-            # Done, so remove the generator
-            del self.generators[username][operation]
-            return None
+            return handler.handle_request(reply)
         except:
-            print("Unknown error @ %s" % "\n".join(self.debug_info[username]))
+            print("Unknown error @ " + str(self.debug_info.get(username, "Username unknown")))
             raise
 
     def execute_rule(self, username):
@@ -50,10 +90,10 @@ class ModelverseKernel(object):
             self.returnvalue = None
             yield None
         else:
-            user_frame, =   yield [("RD", [user_root, "frame"])]
-            self.inst, phase =   yield [("RD", [user_frame, "IP"]),
-                                   ("RD", [user_frame, "phase"]),
-                                  ]
+            user_frame, = yield [("RD", [user_root, "frame"])]
+            self.inst, phase = yield [("RD", [user_frame, "IP"]),
+                                      ("RD", [user_frame, "phase"]),
+                                     ]
             self.new_debug, self.phase_v, inst_v = \
                             yield [("RD", [self.inst, "__debug"]),
                                    ("RV", [phase]),
@@ -68,14 +108,14 @@ class ModelverseKernel(object):
             elif self.inst is None:
                 raise Exception("Instruction pointer could not be found!")
             elif isinstance(self.phase_v, string_types):
-                if self.phase_v == "init" and self.inst in self.compiled:
-                    #print("%-30s(%s)" % ("COMPILED " + str(self.compiled[self.inst]), self.phase_v))
-                    gen = self.execute_primitive(user_root, self.inst, username)
+                if self.phase_v == "init" and self.jit.is_jittable_entry_point(self.inst):
+                    #print("%-30s(%s)" % ("COMPILED " + str(self.jit.jitted_entry_points[self.inst]), self.phase_v))
+                    gen = self.execute_jit(user_root, self.inst, username)
                 elif inst_v is None:
                     raise Exception("%s: error understanding command (%s, %s)" % (self.debug_info[username], inst_v, self.phase_v))
                 else:
                     #print("%-30s(%s) -- %s" % (inst_v["value"], self.phase_v, username))
-                    gen = getattr(self, "%s_%s" % (inst_v["value"], self.phase_v))(user_root)
+                    gen = self.get_inst_phase_generator(inst_v, self.phase_v, user_root)
             elif inst_v is None:
                 raise Exception("%s: error understanding command (%s, %s)" % (self.debug_info[username], inst_v, self.phase_v))
             elif inst_v["value"] == "call":
@@ -84,29 +124,57 @@ class ModelverseKernel(object):
             else:
                 raise Exception("%s: error understanding command (%s, %s)" % (self.debug_info[username], inst_v, self.phase_v))
 
-            try:
-                inp = None
-                while 1:
-                    inp = yield gen.send(inp)
-            except StopIteration:
-                pass
+        def handle_jit_failed(exception):
+            # Try again, but this time without the JIT.
+            # print(exception.message)
+            gen = self.get_inst_phase_generator(inst_v, self.phase_v, user_root)
+            yield [("TAIL_CALL", [gen])]
+
+        yield [("TRY", [])]
+        yield [("CATCH", [jit.JitCompilationFailedException, handle_jit_failed])]
+        yield [("CALL", [gen])]
+        yield [("END_TRY", [])]
+
+    def get_inst_phase_generator(self, inst_v, phase_v, user_root):
+        """Gets a generator for the given instruction in the given phase,
+           for the specified user root."""
+        #print("%-30s(%s) -- %s" % (inst_v["value"], phase_v, username))
+        return getattr(self, "%s_%s" % (inst_v["value"], phase_v))(user_root)
 
     ##########################
     ### Process primitives ###
     ##########################
     def load_primitives(self, username):
-        hierarchy, =     yield [("RD", [self.root, "__hierarchy"])]
-        primitives, =    yield [("RD", [hierarchy, "primitives"])]
-        keys, =          yield [("RDK", [primitives])]
+        yield [("CALL_ARGS",
+                [self.load_primitives_from, (username, 'primitives', primitive_functions)])]
+        yield [("CALL_ARGS",
+                [self.load_primitives_from, (username, 'jit', jit_primitives)])]
+
+    def load_primitives_from(self, username, source_name, source):
+        hierarchy, = yield [("RD", [self.root, "__hierarchy"])]
+        primitives, = yield [("RD", [hierarchy, source_name])]
+        keys, = yield [("RDK", [primitives])]
         function_names = yield [("RV", [f]) for f in keys]
-        signatures  =    yield [("RDN", [primitives, f]) for f in keys]
-        bodies =         yield [("RD", [f, "body"]) for f in signatures]
+        signatures = yield [("RDN", [primitives, f]) for f in keys]
+        bodies = yield [("RD", [f, "body"]) for f in signatures]
         for i in range(len(keys)):
-            self.primitives[bodies[i]] = getattr(primitive_functions, function_names[i])
-        self.compiled.update(self.primitives)
+            self.jit.register_compiled(
+                bodies[i],
+                getattr(source, function_names[i]),
+                function_names[i])
+
+    def jit_compile(self, user_root, inst):
+        # Try to retrieve the suggested name.
+        if self.suggested_function_names is not None and inst in self.suggested_function_names:
+            suggested_name = self.suggested_function_names[inst]
+        else:
+            suggested_name = None
 
-    def execute_primitive(self, user_root, inst, username):
-        # execute_primitive
+        # Have the JIT compile the function.
+        return self.jit.jit_compile(user_root, inst, suggested_name)
+
+    def execute_jit(self, user_root, inst, username):
+        # execute_jit
         user_frame, =    yield [("RD", [user_root, "frame"])]
         symbols, =       yield [("RD", [user_frame, "symbols"])]
         all_links, =     yield [("RO", [symbols])]
@@ -123,35 +191,39 @@ class ModelverseKernel(object):
         parameters["username"] = username
         parameters["mvk"] = self
 
-        # prim is a generator itself!
-        try:
-            # Forward the message we get to this generator
-            # Sometimes it might not even be a generator, in which case this should already be in the except block (i.e., for the Read Root operation)
-            prim = self.compiled[inst](**parameters)
-            inp = None
-            while 1:
-                inp = yield prim.send(inp)
-        except StopIteration:
-            # Execution has ended without return value, so we have no idea what to do
-            raise Exception("%s: primitive finished without returning a value!" % (self.debug_info[username]))
-        except primitive_functions.PrimitiveFinished as e:
-            # Execution has ended with a returnvalue, so read it out from the exception being thrown
-            result = e.result
-
-            #if result is None:
-            #    raise Exception("Primitive raised exception: value of None for operation %s with parameters %s" % (self.compiled[inst], str(parameters)))
+        # Have the JIT compile the function.
+        compiled_func, = yield [("CALL_ARGS", [self.jit_compile, (user_root, inst)])]
+        # Run the compiled function.
+        results = yield [("CALL_KWARGS", [compiled_func, parameters])]
+        if results is None:
+            raise Exception(
+                "%s: primitive finished without returning a value!" % (self.debug_info[username]))
+        else:
+            result, = results
 
         # Clean up the current stack, as if a return happened
-        old_frame, =    yield [("RD", [user_frame, "prev"])]
-        lnk, =          yield [("RDE", [old_frame, "returnvalue"])]
-        _, _, _, _ =    yield [("CD", [old_frame, "returnvalue", result]),
-                               ("CD", [user_root, "frame", old_frame]),
-                               ("DE", [lnk]),
-                               ("DN", [user_frame]),
-                              ]
+        old_frame, exception_return = yield [
+            ("RD", [user_frame, "prev"]),
+            ("RD", [user_frame, primitive_functions.EXCEPTION_RETURN_KEY])]
+
         if self.debug_info[self.username]:
             self.debug_info[self.username].pop()
 
+        if exception_return is not None:
+            # The caller has requested that we throw an exception instead of injecting
+            # the return value into the caller's frame. Read the comment at
+            # primitive_functions.EXCEPTION_RETURN_KEY for the rationale behind this design.
+            yield [("CD", [user_root, "frame", old_frame]),
+                   ("DN", [user_frame])]
+            raise primitive_functions.InterpretedFunctionFinished(result)
+        else:
+            lnk, =          yield [("RDE", [old_frame, "returnvalue"])]
+            _, _, _, _ =    yield [("CD", [old_frame, "returnvalue", result]),
+                                   ("CD", [user_root, "frame", old_frame]),
+                                   ("DE", [lnk]),
+                                   ("DN", [user_frame]),
+                                  ]
+
     ########################################
     ### Execute input and output methods ###
     ########################################
@@ -441,7 +513,16 @@ class ModelverseKernel(object):
                     # For this, we read out the body of the resolved data
                     compiler_val, =  yield [("RD", [variable, "value"])]
                     compiler_body, = yield [("RD", [compiler_val, "body"])]
-                    self.compiled[compiler_body] = compiled_function
+                    self.jit.register_compiled(compiler_body, compiled_function, var_name)
+
+            # If we're dealing with a function, then we might want to figure out what its body id
+            # is now so we can suggest a name to the JIT later.
+            if self.suggested_function_names is not None:
+                compiler_val, =  yield [("RD", [variable, "value"])]
+                if compiler_val is not None:
+                    compiler_body, = yield [("RD", [compiler_val, "body"])]
+                    if compiler_body is not None:
+                        self.suggested_function_names[compiler_body] = var_name
 
         else:
             phase_link, returnvalue_link, new_phase = \
@@ -534,6 +615,10 @@ class ModelverseKernel(object):
 
         if value is None:
             prev_frame, =   yield [("RD", [user_frame, "prev"])]
+            # If the callee's frame is marked with the '__exception_return' key, then
+            # we need to throw an exception instead of just finishing here. This design
+            # gives us O(1) state reads per jit-interpreter transition.
+            exception_return, = yield [("RD", [user_frame, primitive_functions.EXCEPTION_RETURN_KEY])]
             if prev_frame is None:
                 _, =            yield [("DN", [user_root])]
                 del self.debug_info[self.username]
@@ -543,6 +628,9 @@ class ModelverseKernel(object):
                 _, _ =          yield [("CD", [user_root, "frame", prev_frame]),
                                        ("DN", [user_frame]),
                                       ]
+
+            if exception_return is not None:
+                raise primitive_functions.InterpretedFunctionFinished(None)
         else:
             evalstack, evalstack_link, ip_link, new_evalstack, evalstack_phase = \
                             yield [("RD", [user_frame, "evalstack"]),
@@ -564,17 +652,28 @@ class ModelverseKernel(object):
     def return_eval(self, user_root):
         if self.debug_info[self.username]:
             self.debug_info[self.username].pop()
-        user_frame, =       yield [("RD", [user_root, "frame"])]
-        prev_frame, =       yield [("RD", [user_frame, "prev"])]
-        returnvalue, old_returnvalue_link = \
-                            yield [("RD", [user_frame, "returnvalue"]),
-                                   ("RDE", [prev_frame, "returnvalue"]),
-                                  ]
-        _, _, _, _ =        yield [("CD", [user_root, "frame", prev_frame]),
-                                   ("CD", [prev_frame, "returnvalue", returnvalue]),
-                                   ("DE", [old_returnvalue_link]),
-                                   ("DN", [user_frame]),
-                                  ]
+
+        user_frame, = yield [("RD", [user_root, "frame"])]
+        prev_frame, exception_return, returnvalue = yield [
+            ("RD", [user_frame, "prev"]),
+            ("RD", [user_frame, primitive_functions.EXCEPTION_RETURN_KEY]),
+            ("RD", [user_frame, "returnvalue"])]
+
+        # If the callee's frame is marked with the '__exception_return' key, then
+        # we need to throw an exception instead of just finishing here. This design
+        # gives us O(1) state reads per jit-interpreter transition.
+        if exception_return is not None:
+            yield [
+                ("CD", [user_root, "frame", prev_frame]),
+                ("DN", [user_frame])]
+            raise primitive_functions.InterpretedFunctionFinished(returnvalue)
+        else:
+            old_returnvalue_link, = yield [("RDE", [prev_frame, "returnvalue"])]
+            yield [
+                ("CD", [user_root, "frame", prev_frame]),
+                ("CD", [prev_frame, "returnvalue", returnvalue]),
+                ("DE", [old_returnvalue_link]),
+                ("DN", [user_frame])]
 
     def constant_init(self, user_root):
         user_frame, =       yield [("RD", [user_root, "frame"])]
@@ -679,9 +778,11 @@ class ModelverseKernel(object):
 
         if param is None:
             returnvalue, =  yield [("RD", [user_frame, "returnvalue"])]
-            body, phase_link, frame_link, prev_phase, new_phase, new_frame, new_evalstack, new_symbols, new_returnvalue = \
-                            yield [("RD", [returnvalue, "body"]),
-                                   ("RDE", [user_frame, "phase"]),
+            body, =         yield [("RD", [returnvalue, "body"])]
+            self.jit.mark_entry_point(body)
+
+            phase_link, frame_link, prev_phase, new_phase, new_frame, new_evalstack, new_symbols, new_returnvalue = \
+                            yield [("RDE", [user_frame, "phase"]),
                                    ("RDE", [user_root, "frame"]),
                                    ("CNV", ["finish"]),
                                    ("CNV", ["init"]),
@@ -721,6 +822,10 @@ class ModelverseKernel(object):
                             yield [("RD", [signature, "params"]),
                                    ("RD", [inst, "last_param"]),
                                   ]
+
+            body, =         yield [("RD", [new_IP, "body"])]
+            self.jit.mark_entry_point(body)
+            
             name, =         yield [("RD", [last_param, "name"])]
             name_value, =   yield [("RV", [name])]
             returnvalue, formal_parameter, new_phase, variable = \

+ 30 - 2
kernel/modelverse_kernel/primitives.py

@@ -1,8 +1,36 @@
-# Exception to indicate the result value of the primitive, as a return cannot be used
+
 class PrimitiveFinished(Exception):
+    """Exception to indicate the result value of a primitive, as a return cannot be used."""
+    def __init__(self, value):
+        Exception.__init__(self)
+        self.result = value
+
+class InterpretedFunctionFinished(Exception):
+    """Exception to indicate the result value of an interpreted function, as a return
+       cannot be used."""
     def __init__(self, value):
+        Exception.__init__(self)
         self.result = value
-    
+
+# Functions whose frames are annotated with __exception_return use the JIT's calling convention
+# instead of the kernel's: returns are handled by throwing an InterpretedFunctionFinished
+# exception; the caller's returnvalue is not modified.
+#
+# ### Rationale for __exception_return
+#
+# __exception_return is a useful mechanism because it allows us to have a __call_function
+# implementation with O(1) state-read overhead. A previous implementation of __call_function
+# checked whether the caller's frame had been popped whenever ModelverseKernel.execute_yields
+# threw a StopIteration exception. However, that incurs O(n) overhead _per call,_ where n is
+# the number of StopIteration exceptions thrown during the call. O(n) is already bad, but it
+# becomes O(n * m) when m calls to __call_function are nested. And that's just not acceptable.
+# __exception_return requires kernel support, but I think the added complexity is well worth it;
+# I reckon JIT-to-interpreter switches aren't going to get a whole lot cheaper than this.
+EXCEPTION_RETURN_KEY = "__exception_return"
+"""A dictionary key for functions which request that the kernel throw a InterpretedFunctionFinished
+   exception with the return value instead of injecting the return value in the caller's frame."""
+
 def integer_subtraction(a, b, **remainder):
     a_value, b_value =  yield [("RV", [a]), ("RV", [b])]
     result, = yield [("CNV", [a_value - b_value])]
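Primitives in this module are generators that report their result by raising PrimitiveFinished, since a plain return cannot carry a value back through the kernel. A sketch of another primitive in the same style as integer_subtraction above; integer_max is an invented example, not an existing Modelverse primitive:

    # Invented example, written in the same style as the primitives in this module.
    def integer_max(a, b, **remainder):
        a_value, b_value = yield [("RV", [a]), ("RV", [b])]    # read both operand values
        result, = yield [("CNV", [max(a_value, b_value)])]     # create the result node
        raise PrimitiveFinished(result)

When jitted code calls back into interpreted code, the callee's frame is tagged with EXCEPTION_RETURN_KEY, and the kernel then reports the interpreted result by raising InterpretedFunctionFinished instead of writing it into the caller's returnvalue.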

+ 328 - 0
kernel/modelverse_kernel/request_handler.py

@@ -0,0 +1,328 @@
+import modelverse_kernel.primitives as primitive_functions
+
+class KnownRequestHandled(Exception):
+    """An exception that signifies that a known request was handled."""
+    pass
+
+class RequestHandler(object):
+    """A type of object that intercepts logic-related Modelverse requests, and
+       forwards Modelverse state requests."""
+    def __init__(self):
+        # generator_stack is a stack of
+        # (generator, pending requests, finished-requests flag, request replies, has-reply)
+        # tuples.
+        self.generator_stack = []
+        # exception_handlers is a stack of
+        # (generator_stack index, [(exception type, handler function)])
+        # tuples.
+        self.exception_handlers = []
+        self.handlers = {
+            'CALL' : self.execute_call,
+            'CALL_ARGS' : self.execute_call_args,
+            'CALL_KWARGS' : self.execute_call_kwargs,
+            'TAIL_CALL' : self.execute_tail_call,
+            'TAIL_CALL_ARGS' : self.execute_tail_call_args,
+            'TAIL_CALL_KWARGS' : self.execute_tail_call_kwargs,
+            'TRY' : self.execute_try,
+            'CATCH' : self.execute_catch,
+            'END_TRY' : self.execute_end_try
+        }
+
+    def is_active(self):
+        """Tests if this request handler has a top-of-stack generator."""
+        return len(self.generator_stack) > 0
+
+    def handle_request(self, reply):
+        """Replies to a request from the top-of-stack generator, and returns a new request."""
+        if not self.is_active():
+            raise ValueError('handle_request cannot be called with an empty generator stack.')
+
+        # Append the server's replies to the list of replies.
+        self.extend_replies(reply)
+        while 1:
+            # Silence pylint's warning about catching Exception.
+            # pylint: disable=I0011,W0703
+            try:
+                if self.has_pending_requests():
+                    try:
+                        # Try to pop a request for the modelverse state.
+                        result = self.pop_requests()
+                        return result
+                    except KnownRequestHandled:
+                        # Carry on.
+                        pass
+
+                # Perform a single generator step.
+                self.step()
+            except StopIteration:
+                # Done, so remove the generator
+                self.pop_generator()
+                if self.is_active():
+                    # This generator was called from another generator.
+                    # Append 'None' to the caller's list of replies.
+                    self.append_reply(None)
+                else:
+                    # Looks like we're done here.
+                    return None
+            except primitive_functions.PrimitiveFinished as ex:
+                # Done, so remove the generator
+                self.pop_generator()
+                if self.is_active():
+                    # This generator was called from another generator.
+                    # Append the callee's result to the caller's list of replies.
+                    self.append_reply(ex.result)
+                else:
+                    # Looks like we're done here.
+                    return None
+            except Exception as ex:
+                # Maybe get an exception handler to do this.
+                if not self.handle_exception(ex):
+                    raise
+
+    def set_finished_requests_flag(self):
+        """Sets the finished_requests flag in the top-of-stack tuple."""
+        current_generator, requests, _, replies, has_reply = self.generator_stack[-1]
+        self.generator_stack[-1] = (current_generator, requests, True, replies, has_reply)
+
+    def has_pending_requests(self):
+        """Tests if the top-of-stack generator has pending requests."""
+        _, _, finished_requests, _, _ = self.generator_stack[-1]
+        return not finished_requests
+
+    def push_generator(self, gen):
+        """Pushes a new generator onto the stack."""
+        self.generator_stack.append((gen, None, True, [], False))
+        # print('Pushed generator %s. Generator count: %d' % (gen, len(self.generator_stack)))
+
+    def pop_generator(self):
+        """Removes the top-of-stack generator from the generator stack."""
+        # Pop the generator itself.
+        self.generator_stack.pop()
+        # print('Popped generator %s. Generator count: %d' % (gen, len(self.generator_stack)))
+        # Pop any exception handlers defined by the generator.
+        top_of_stack_index = len(self.generator_stack)
+        while len(self.exception_handlers) > 0:
+            stack_index, _ = self.exception_handlers[-1]
+            if stack_index == top_of_stack_index:
+                # Pop exception handlers until exception_handlers is empty or until
+                # we find an exception handler that is not associated with the popped
+                # generator.
+                self.exception_handlers.pop()
+            else:
+                # We're done here.
+                break
+
+    def append_reply(self, new_reply):
+        """Appends a reply to the top-of-stack generator's list of pending replies."""
+        current_generator, requests, requests_done, replies, has_reply = self.generator_stack[-1]
+        replies.append(new_reply)
+        has_reply = True
+        self.generator_stack[-1] = (current_generator, requests, requests_done, replies, has_reply)
+
+    def extend_replies(self, new_replies):
+        """Appends a list of replies to the top-of-stack generator's list of pending replies."""
+        current_generator, requests, requests_done, replies, has_reply = self.generator_stack[-1]
+        if new_replies is not None:
+            replies.extend(new_replies)
+            has_reply = True
+            self.generator_stack[-1] = (
+                current_generator, requests, requests_done, replies, has_reply)
+
+    def step(self):
+        """Performs a single step: accumulated replies are fed to the generator,
+           which then produces requests."""
+        current_generator, _, _, replies, has_reply = self.generator_stack[-1]
+
+        # Send the replies to the generator, and ask for new requests.
+        requests = current_generator.send(replies if has_reply else None)
+
+        # Update the entry on the stack.
+        self.generator_stack[-1] = (current_generator, requests, False, [], False)
+
+    def handle_exception(self, exception):
+        """Handles the given exception. A Boolean is returned that tells if
+           the exception was handled."""
+        # print('Exception thrown from %s: %s' % (str(self.generator_stack[-1]), str(exception)))
+        while len(self.exception_handlers) > 0:
+            # Pop the top-of-stack exception handler.
+            stack_index, handlers = self.exception_handlers.pop()
+
+            # Try to find an applicable handler.
+            applicable_handler = None
+            for handled_type, handler in handlers:
+                if isinstance(exception, handled_type):
+                    applicable_handler = handler
+
+            if applicable_handler is not None:
+                # We handle exceptions by first clearing the current stack frame and
+                # all of its children. Then, we place a dummy frame on the stack with
+                # a single 'TAIL_CALL_ARGS' request. The next iteration will replace
+                # the dummy frame by an actual frame.
+                del self.generator_stack[stack_index:]
+                self.generator_stack.append(
+                    (None,
+                     [('TAIL_CALL_ARGS', [applicable_handler, (exception,)])],
+                     False,
+                     [],
+                     False))
+                return True
+
+        # We couldn't find an applicable exception handler, even after exhausting the
+        # entire exception handler stack. All is lost.
+        # Also, clean up after ourselves.
+        self.generator_stack = []
+        self.exception_handlers = []
+        return False
+
+    def pop_requests(self):
+        """Tries to pop a batch of Modelverse _state_ requests from the
+           current list of requests. Known requests are executed immediately.
+
+           A list of state requests is returned, and the finished-requests flag is
+           set once the current batch has been exhausted. When a known request is
+           handled instead, a KnownRequestHandled exception is raised."""
+        _, requests, _, _, _ = self.generator_stack[-1]
+        if requests is None or len(requests) == 0:
+            # Couldn't find a request for the state to handle.
+            self.set_finished_requests_flag()
+            return requests
+
+        for i, elem in enumerate(requests):
+            if elem[0] in self.handlers:
+                # The kernel should handle known requests.
+                if i > 0:
+                    # Handle any requests that precede the known request first.
+                    pre_requests = requests[:i]
+                    del requests[:i]
+                    return pre_requests
+
+                # The known request must be the first element in the list. Pop it.
+                requests.pop(0)
+
+                # The list of requests might be empty now. If so, then flag this
+                # batch of requests as finished.
+                if len(requests) == 0:
+                    self.set_finished_requests_flag()
+
+                # Handle the request.
+                _, request_args = elem
+                self.handlers[elem[0]](request_args)
+                raise KnownRequestHandled()
+
+        # We couldn't find a known request in the batch of requests, so we might as well
+        # handle them all at once then.
+        self.set_finished_requests_flag()
+        return requests
+
+    def execute_call(self, request_args):
+        """Executes a CALL-request with the given argument list."""
+        # Format: ("CALL", [gen])
+        gen, = request_args
+        self.push_generator(gen)
+
+    def execute_call_kwargs(self, request_args):
+        """Executes a CALL_KWARGS-request with the given argument list."""
+        # Format: ("CALL_KWARGS", [func, kwargs])
+        # This format is useful because it also works for functions that
+        # throw an exception but never yield.
+        func, kwargs = request_args
+        # We need to be extra careful here, because func(**kwargs) might
+        # not be a generator at all: it might simply be a method that
+        # raises an exception. To cope with this we need to push a dummy
+        # entry onto the stack if a StopIteration or PrimitiveFinished
+        # exception is thrown. The logic in execute_yields will then pop
+        # that dummy entry.
+        try:
+            self.push_generator(func(**kwargs))
+        except StopIteration:
+            self.push_generator(None)
+            raise
+        except primitive_functions.PrimitiveFinished:
+            self.push_generator(None)
+            raise
+
+    def execute_call_args(self, request_args):
+        """Executes a CALL_ARGS-request with the given argument list."""
+        # Format: ("CALL_ARGS", [gen, args])
+        func, args = request_args
+        # We need to be extra careful here, because func(*args) might
+        # not be a generator at all: it might simply be a method that
+        # raises an exception. To cope with this we need to push a dummy
+        # entry onto the stack if a StopIteration or PrimitiveFinished
+        # exception is thrown. The logic in execute_yields will then pop
+        # that dummy entry.
+        try:
+            self.push_generator(func(*args))
+        except StopIteration:
+            self.push_generator(None)
+            raise
+        except primitive_functions.PrimitiveFinished:
+            self.push_generator(None)
+            raise
+
+    def execute_tail_call(self, request_args):
+        """Executes a TAIL_CALL-request with the given argument list."""
+        # Format: ("TAIL_CALL", [gen])
+        self.pop_generator()
+        self.execute_call(request_args)
+
+    def execute_tail_call_args(self, request_args):
+        """Executes a TAIL_CALL_ARGS-request with the given argument list."""
+        # Format: ("TAIL_CALL_ARGS", [gen, args])
+        self.pop_generator()
+        self.execute_call_args(request_args)
+
+    def execute_tail_call_kwargs(self, request_args):
+        """Executes a TAIL_CALL_KWARGS-request with the given argument list."""
+        # Format: ("TAIL_CALL_KWARGS", [gen, kwargs])
+        self.pop_generator()
+        self.execute_call_kwargs(request_args)
+
+    def execute_try(self, request_args):
+        """Executes a TRY-request with the given argument list."""
+        # TRY pushes an exception handler onto the exception handler stack.
+        # Format: ("TRY", [])
+        if len(request_args) != 0:
+            raise ValueError(
+                ("TRY was given argument list '%s', " +
+                 "expected exactly zero arguments.") % repr(request_args))
+        self.exception_handlers.append((len(self.generator_stack) - 1, []))
+
+    def execute_catch(self, request_args):
+        """Executes a CATCH-request with the given argument list."""
+        if len(request_args) != 2:
+            raise ValueError(
+                ("CATCH was given argument list '%s', "
+                 "expected exactly two arguments: an exception "
+                 "type and an exception handler.") % repr(request_args))
+        exception_type, handler = request_args
+        stack_index, handlers = self.exception_handlers[-1]
+        if stack_index != len(self.generator_stack) - 1:
+            raise ValueError(
+                'Cannot comply with CATCH because there is no exception handler for the '
+                'current generator.')
+
+        handlers.append((exception_type, handler))
+
+    def execute_end_try(self, request_args):
+        """Executes an END_TRY-request with the given argument list."""
+        # END_TRY pops a value from the exception handler stack. The
+        # popped value must reference the top-of-stack element in the
+        # generator stack. END_TRY takes no arguments.
+        # Format: ("END_TRY", [])
+        if len(request_args) != 0:
+            raise ValueError(
+                "END_TRY was given argument list '%s', expected '%s'." % (
+                    repr(request_args), repr([])))
+
+        if len(self.exception_handlers) == 0:
+            raise ValueError(
+                'Cannot comply with END_TRY because the exception handler stack is empty.')
+
+        stack_index, _ = self.exception_handlers[-1]
+        if stack_index != len(self.generator_stack) - 1:
+            raise ValueError(
+                'Cannot comply with END_TRY because there is no exception handler for the '
+                'current generator.')
+
+        # Everything seems to be in order. Pop the exception handler.
+        self.exception_handlers.pop()
+

+ 11 - 0
performance/code/fibonacci.alc

@@ -0,0 +1,11 @@
+include "primitives.alh"
+
+Integer function fib(param : Integer):
+	if (param <= 2):
+		return 1!
+	else:
+		return fib(param - 1) + fib(param - 2)!
+
+Void function test_main():
+	fib(input())
+	return!

+ 95 - 0
performance/code/matrix.alc

@@ -0,0 +1,95 @@
+include "primitives.alh"
+include "random.alh"
+
+Float function v2f(i : Element):
+	return cast_s2f(cast_v2s(i))!
+
+Void function dict_overwrite(d : Element, key : Element, value : Element):
+	if (dict_in(d, key)):
+		dict_delete(d, key)
+	if (dict_in_node(d, key)):
+		dict_delete_node(d, key)
+	dict_add(d, key, value)
+
+	return !
+
+Element function create_random_matrix(n : Integer):
+	Element m
+	Integer i
+	Integer j
+	Element t
+
+	// Construct the matrix first, with as many rows as there are variables
+	// Number of columns is 1 higher
+	i = 0
+	m = create_node()
+	while (i < n):
+		j = 0
+		t = create_node()
+		while (j < (n + 1)):
+			list_append(t, random())
+			j = j + 1
+		list_append(m, t)
+		i = i + 1
+
+	return m!
+
+Void function eliminateGaussJordan(m : Element):
+	Integer i
+	Integer j
+	Integer f
+	Integer g
+	Boolean searching
+	Element t
+	Float divisor
+
+	i = 0
+	j = 0
+
+	while (i < read_nr_out(m)):
+		// Make sure pivot m[i][j] != 0, swapping if necessary
+		while (v2f(m[i][j]) == 0.0):
+			// Is zero, so find row which is not zero
+			f = i + 1
+			searching = True
+			while (searching):
+				if (f >= read_nr_out(m)):
+					// No longer any rows left, so just increase column counter
+					searching = False
+					j = j + 1
+				else:
+					if (v2f(m[f][j]) == 0.0):
+						// Also zero, so continue
+						f = f + 1
+					else:
+						// Found non-zero, so swap row
+						t = m[f]
+						dict_overwrite(m, f, m[i])
+						dict_overwrite(m, i, t)
+						searching = False
+			// If we have increased j, we will just start the loop again (possibly), as m[i][j] might be zero again
+
+		// Pivot in m[i][j] guaranteed to not be 0
+		// Now divide complete row by value of m[i][j] to make it equal 1
+		f = j
+		divisor = v2f(m[i][j])
+		while (f < read_nr_out(m[i])):
+			dict_overwrite(m[i], f, float_division(v2f(m[i][f]), divisor))
+			f = f + 1
+
+		// Eliminate all rows in the j-th column, except the i-th row
+		f = 0
+		while (f < read_nr_out(m)):
+			if (bool_not(f == i)):
+				g = j
+				divisor = v2f(m[f][j])
+				while (g < read_nr_out(m[f])):
+					dict_overwrite(m[f], g, v2f(m[f][g]) - (divisor * v2f(m[i][g])))
+					g = g + 1
+			f = f + 1
+
+		// Increase row and column
+		i = i + 1
+		j = j + 1
+
+	return !
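For reference, the same Gauss-Jordan reduction written as ordinary Python over a list-of-lists matrix; this is an independent sketch to clarify the algorithm, not code the Modelverse uses:

    def eliminate_gauss_jordan(m):
        """In-place Gauss-Jordan elimination on a list-of-lists matrix of floats."""
        rows = len(m)
        cols = len(m[0])
        i = j = 0
        while i < rows and j < cols:
            # Find a row at or below i with a non-zero entry in column j.
            pivot_row = next((f for f in range(i, rows) if m[f][j] != 0.0), None)
            if pivot_row is None:
                j += 1                      # column is all zeros: skip it
                continue
            m[i], m[pivot_row] = m[pivot_row], m[i]
            # Scale the pivot row so the pivot becomes 1.
            divisor = m[i][j]
            m[i] = [value / divisor for value in m[i]]
            # Eliminate column j from every other row.
            for f in range(rows):
                if f != i:
                    factor = m[f][j]
                    m[f] = [a - factor * b for a, b in zip(m[f], m[i])]
            i += 1
            j += 1
        return m

    print(eliminate_gauss_jordan([[2.0, 1.0, 5.0], [1.0, 3.0, 10.0]]))
    # [[1.0, 0.0, 1.0], [0.0, 1.0, 3.0]]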

+ 7 - 0
performance/code/matrix_create.alc

@@ -0,0 +1,7 @@
+include "primitives.alh"
+
+Element function create_random_matrix(n : Integer)
+
+Void function test_main():
+	create_random_matrix(input())
+	return!

+ 10 - 0
performance/code/matrix_gauss_jordan.alc

@@ -0,0 +1,10 @@
+include "primitives.alh"
+
+Element function create_random_matrix(n : Integer)
+Void function eliminateGaussJordan(m : Element)
+
+Void function test_main():
+	Element m
+	m = create_random_matrix(input())
+	eliminateGaussJordan(m)
+	return!

+ 32 - 0
performance/code/test_harness.alc

@@ -0,0 +1,32 @@
+include "primitives.alh"
+include "jit.alh"
+
+Void function test_main()
+
+Void function call_function(function_name : String):
+	// Resolve the specified function, and execute it.
+	Element func
+	func = resolve(function_name)
+	func()
+	return!
+
+Void function main():
+	String config
+	Integer start_time
+	Integer end_time
+	config = input()
+	// if (config == "interpreter"):
+		// set_jit_enabled(False)
+
+	start_time = time()
+	// HACK: use `call_function` to hide what would otherwise be a direct call to `test_main`
+	// from the JIT. This prevents the JIT from compiling `test_main` _before_ `config` has
+	// been analyzed.
+	call_function("test_main")
+	end_time = time()
+	output(end_time - start_time)
+	
+	while (True):
+		output(input())
+	
+	return!

+ 101 - 0
performance/perf2tex.py

@@ -0,0 +1,101 @@
+"""Converts performance data files (as produced by utils.py) to LaTeX charts."""
+
+import utils
+
+# Generated LaTeX is based on the accepted answer to
+# http://tex.stackexchange.com/questions/101320/grouped-bar-chart
+
+# pylint: disable=I0011,W0141
+
+LATEX_COLORS = [
+    ('chartBlue', 0x4F81BD),
+    ('chartRed', 0xC0504D),
+    ('chartGreen', 0x9BBB59),
+    ('chartPurple', 0x9F4C7C)
+]
+
+LATEX_HEADER = r"""\documentclass[12pt,a4paper,onecolumn,openright]{report}
+\usepackage{xcolor}
+\usepackage{pgfplots}
+\usepackage{tikz}
+\usepgfplotslibrary{units}
+
+% Define bar chart colors
+%"""
+
+LATEX_DOCUMENT_HEADER = r"""\begin{document}
+\begin{tikzpicture}"""
+
+LATEX_DOCUMENT_FOOTER = r"""\end{tikzpicture}
+\end{document}"""
+
+def encode_latex_string(value):
+    """Encodes the given string as a LaTeX string."""
+    # I guess this is good enough for now. This may need to be
+    # revisited if we encounter more complicated names.
+    return value.replace('_', '\\_')
+
+def assemble_latex_chart(optimization_levels, color_defs, test_names, data):
+    """Assembles a LaTeX chart from the given components."""
+    lines = []
+    lines.append(LATEX_HEADER)
+    for color in color_defs:
+        lines.append(r'\definecolor{%s}{HTML}{%X}' % color)
+    lines.append(LATEX_DOCUMENT_HEADER)
+    lines.append(r"""
+    \begin{axis}[
+        width = 0.85*\textwidth,
+        height = 8cm,
+        major x tick style = transparent,
+        ybar=2*\pgflinewidth,
+        bar width=14pt,
+        ymajorgrids = true,
+        ylabel = {Run time},
+        symbolic x coords={%s},
+        xtick = data,
+        scaled y ticks = false,
+        enlarge x limits=0.25,
+        ymin=0,
+        y unit=s,
+        legend cell align=left,
+        legend style={
+                at={(1,1.05)},
+                anchor=south east,
+                column sep=1ex
+        }
+    ]""" % ','.join(map(encode_latex_string, test_names)))
+    for color_name, points in data:
+        lines.append(r"""
+        \addplot[style={%s,fill=%s,mark=none}]
+            coordinates {%s};""" % (
+                color_name, color_name,
+                ' '.join([('(%s,%s)' % (encode_latex_string(name), measurement))
+                          for name, measurement in points])))
+    lines.append(r"""
+        \legend{%s}""" % ','.join(map(encode_latex_string, optimization_levels)))
+    lines.append(r"""
+    \end{axis}""")
+    lines.append(LATEX_DOCUMENT_FOOTER)
+    return '\n'.join(lines)
+
+def create_latex_chart(perf_data):
+    """Creates a LaTeX chart for the given performance data."""
+    unused_colors = LATEX_COLORS[:]
+    opt_levels = []
+    color_defs = []
+    test_names = []
+    data = []
+    for optimization_level, measurements in perf_data:
+        color = unused_colors.pop(0)
+        color_name, _ = color
+        opt_levels.append(optimization_level)
+        color_defs.append(color)
+        data.append((color_name, measurements))
+        for name, _ in measurements:
+            if name not in test_names:
+                test_names.append(name)
+
+    return assemble_latex_chart(opt_levels, color_defs, test_names, data)
+
+if __name__ == '__main__':
+    print(create_latex_chart(utils.parse_perf_data(utils.DEFAULT_PERF_FILE_NAME)))
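The chart generator can be smoke-tested without a perf_data.txt file by handing create_latex_chart a hand-written data set. The numbers below are invented, and the snippet is assumed to run from the repository root (perf2tex imports utils, which expects that layout):

    # Invented sample data in the shape create_latex_chart expects: a list of
    # (optimization level, [(test name, run time in seconds), ...]) pairs.
    import perf2tex

    sample_data = [
        ('legacy-interpreter', [('fibonacci', 12.4), ('matrix_create', 30.1)]),
        ('interpreter',        [('fibonacci', 9.8),  ('matrix_create', 24.5)]),
        ('baseline-jit',       [('fibonacci', 2.1),  ('matrix_create', 6.7)]),
    ]

    with open('perf_chart.tex', 'w') as tex_file:
        tex_file.write(perf2tex.create_latex_chart(sample_data))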

+ 13 - 0
performance/test_fibonacci.py

@@ -0,0 +1,13 @@
+import unittest
+import utils
+
+class TestFibonacci(unittest.TestCase):
+    def fibonacci(self, optimization_level):
+        utils.write_perf_to_file(
+            'fibonacci', optimization_level,
+            utils.run_perf_test(
+                ["test_harness.alc", "fibonacci.alc", "primitives.alc", "jit.alc"],
+                [20],
+                optimization_level))
+
+utils.define_perf_tests(TestFibonacci, TestFibonacci.fibonacci)

+ 16 - 0
performance/test_matrix_create.py

@@ -0,0 +1,16 @@
+import unittest
+import utils
+
+
+class TestMatrixCreate(unittest.TestCase):
+    def create_matrix(self, optimization_level):
+        utils.write_perf_to_file(
+            'matrix_create', optimization_level,
+            utils.run_perf_test(
+                ["test_harness.alc", "matrix.alc",
+                 "matrix_create.alc", "primitives.alc",
+                 "random.alc", "jit.alc"],
+                [100],
+                optimization_level))
+
+utils.define_perf_tests(TestMatrixCreate, TestMatrixCreate.create_matrix)

+ 16 - 0
performance/test_matrix_gauss_jordan.py

@@ -0,0 +1,16 @@
+import unittest
+import utils
+
+
+class TestMatrixGaussJordan(unittest.TestCase):
+    def matrix_gauss_jordan(self, optimization_level):
+        utils.write_perf_to_file(
+            'matrix_gauss_jordan', optimization_level,
+            utils.run_perf_test(
+                ["test_harness.alc", "matrix.alc",
+                 "matrix_gauss_jordan.alc", "primitives.alc",
+                 "random.alc", "jit.alc"],
+                [25],
+                optimization_level))
+
+utils.define_perf_tests(TestMatrixGaussJordan, TestMatrixGaussJordan.matrix_gauss_jordan)

+ 284 - 0
performance/utils.py

@@ -0,0 +1,284 @@
+import unittest
+import sys
+import os
+
+import time
+import json
+import urllib
+import urllib2
+import subprocess
+import signal
+import random
+import operator
+
+sys.path.append("interface/HUTN")
+sys.path.append("scripts")
+from hutn_compiler.compiler import main as do_compile
+from check_objects import to_recompile
+
+USERNAME = "test_user"
+PARALLEL_PUSH = True
+
+BOOTSTRAP_FOLDER_NAME = "bootstrap"
+CURRENT_FOLDER_NAME = "performance"
+
+PORTS = set()
+
+OPTIMIZATION_LEVEL_LEGACY_INTERPRETER = "legacy-interpreter"
+OPTIMIZATION_LEVEL_INTERPRETER = "interpreter"
+OPTIMIZATION_LEVEL_BASELINE_JIT = "baseline-jit"
+ALL_OPTIMIZATION_LEVELS = [
+    OPTIMIZATION_LEVEL_LEGACY_INTERPRETER,
+    OPTIMIZATION_LEVEL_INTERPRETER,
+    OPTIMIZATION_LEVEL_BASELINE_JIT
+]
+
+class ModelverseTerminated(Exception):
+    """An exception that tells the user that the Modelverse has terminated."""
+    pass
+
+def get_code_folder_name():
+    """Gets the name of the code folder."""
+    return '%s/code' % CURRENT_FOLDER_NAME
+
+def get_free_port():
+    """Gets a unique new port."""
+    while 1:
+        port = random.randint(10000, 20000)
+        # Check if this port is in the set of ports.
+        if port not in PORTS:
+            # We have found a unique port. Add it to the set and return.
+            PORTS.add(port)
+            return port
+
+def execute(scriptname, parameters=None, wait=False):
+    """Runs the given script from the scripts/ folder, optionally waiting for it to finish."""
+    if os.name not in ["nt", "posix"]:
+        # Stop now, as we would have no idea how to kill this process' subtree.
+        raise Exception("Unsupported OS: " + str(os.name))
+
+    command = [sys.executable, "scripts/%s.py" % scriptname] + (
+        [] if parameters is None else parameters)
+
+    if wait:
+        return subprocess.call(command, shell=False)
+    else:
+        return subprocess.Popen(command, shell=False)
+
+def kill(process):
+    """Kills the given process."""
+    if os.name == "nt":
+        subprocess.call(["taskkill", "/F", "/T", "/PID", "%i" % process.pid])
+    elif os.name == "posix":
+        subprocess.call(["pkill", "-P", "%i" % process.pid])
+
+def set_input_data(address, data):
+    """Sets the Modelverse program's input data."""
+    if data is None:
+        return []
+
+    return urllib2.urlopen(
+        urllib2.Request(
+            address,
+            urllib.urlencode(
+                {"op": "set_input", "data": json.dumps(data), "username": USERNAME})),
+        timeout=10).read()
+
+def compile_file(address, mod_filename, filename, mode, proc):
+    """Compiles the given file, returning True on success and False on failure."""
+    try:
+        timeout_val = 240
+        username = str(random.random())
+        while 1:
+            proc2 = execute(
+                "compile", [address, mod_filename, username, filename, mode], wait=False)
+
+            if proc.poll() is not None:
+                # The Modelverse server has already terminated, which isn't a good sign!
+                raise Exception("Modelverse died!")
+
+            while proc2.returncode is None:
+                time.sleep(0.01)
+                proc2.poll()
+                timeout_val -= 0.01
+                if timeout_val < 0:
+                    kill(proc2)
+                    print("Compilation timeout expired!")
+                    return False
+
+            # Re-run the compiler for as long as it exits with code 2.
+            if proc2.returncode != 2:
+                break
+
+        # Make sure the compiler stopped correctly.
+        return proc2.returncode == 0
+    finally:
+        try:
+            kill(proc2)
+        except UnboundLocalError:
+            pass
+
+def run_file(files, parameters, mode, handle_output, optimization_level=None):
+    """Compiles the given sequence of files, feeds them the given input in the given mode,
+       and handles their output."""
+
+    time.sleep(0.01)
+    port = get_free_port()
+    address = "http://127.0.0.1:%i" % port
+    try:
+        # Run Modelverse server
+        modelverse_args = [str(port)]
+        if optimization_level is not None:
+            modelverse_args.append('--kernel=%s' % optimization_level)
+        proc = execute("run_local_modelverse", modelverse_args, wait=False)
+
+        threads = []
+        mod_files = []
+        for filename in files:
+            if os.path.isfile("%s/%s" % (get_code_folder_name(), filename)):
+                mod_filename = "%s/%s" % (get_code_folder_name(), filename)
+            elif os.path.isfile("%s/%s" % (BOOTSTRAP_FOLDER_NAME, filename)):
+                mod_filename = "%s/%s" % (BOOTSTRAP_FOLDER_NAME, filename)
+            else:
+                raise Exception("File not found: %s" % filename)
+            mod_files.append(mod_filename)
+
+        to_compile = to_recompile(address, mod_files)
+
+        for mod_filename in to_compile:
+            if PARALLEL_PUSH:
+                import threading
+                threads.append(
+                    threading.Thread(
+                        target=compile_file,
+                        args=[address, mod_filename, mod_filename, mode, proc]))
+                threads[-1].start()
+            else:
+                compile_file(address, mod_filename, mod_filename, mode, proc)
+
+        if PARALLEL_PUSH:
+            for t in threads:
+                t.join()
+
+        if mode[-1] == "O":
+            # Fire up the linker
+            val = execute("link_and_load", [address, USERNAME] + mod_files, wait=True)
+            if val != 0:
+                raise Exception("Linking error")
+
+        # Send the request ...
+        set_input_data(address, parameters)
+
+        # ... and wait for replies
+        while 1:
+            val = urllib2.urlopen(
+                urllib2.Request(
+                    address,
+                    urllib.urlencode({"op": "get_output", "username": USERNAME})),
+                timeout=240).read()
+            val = json.loads(val)
+
+            if proc.poll() is not None:
+                # Modelverse has terminated. This may or may not be what we want.
+                raise ModelverseTerminated()
+
+            if not handle_output(val):
+                return
+
+    finally:
+        try:
+            kill(proc)
+        except UnboundLocalError:
+            pass
+
+def run_file_to_completion(files, parameters, mode):
+    """Compiles the given sequence of files, feeds them the given input in the given mode,
+       and then collects and returns output."""
+    results = []
+    def handle_output(output):
+        """Appends the given output to the list of results."""
+        results.append(output)
+        return True
+
+    try:
+        run_file(files, parameters, mode, handle_output)
+    except ModelverseTerminated:
+        pass
+    return results
+
+def run_file_fixed_output_count(files, parameters, mode, output_count, optimization_level=None):
+    """Compiles the given sequence of files, feeds them the given input in the given mode,
+       and then collects and returns a fixed number of outputs."""
+    results = []
+    def handle_output(output):
+        """Appends the given output to the list of results."""
+        if len(results) < output_count:
+            results.append(output)
+            return True
+        else:
+            return False
+
+    run_file(files, parameters, mode, handle_output, optimization_level)
+    return results
+
+def run_file_single_output(files, parameters, mode, optimization_level=None):
+    """Compiles the given sequence of files, feeds them the given input in the given mode,
+       and then collects and returns a single output."""
+    return run_file_fixed_output_count(files, parameters, mode, 1, optimization_level)[0]
+
+def run_perf_test(files, parameters, optimization_level, n_iterations=1):
+    """Compiles the given sequence of files, feeds them the given input in the given mode,
+       and then collects their output. This process is repeated n_iterations times. The
+       return value is the average of all outputs."""
+    result = 0.0
+    for _ in xrange(n_iterations):
+        result += float(
+            run_file_single_output(
+                files, [optimization_level] + parameters + [0], 'CO',
+                optimization_level)) / float(n_iterations)
+    return result
+
+def format_output(output):
+    """Formats the output of `run_file_to_completion` as a string."""
+    return '\n'.join(output)
+
+def define_perf_test(target_class, test_function, optimization_level):
+    """Defines a performance test in the given class. The performance test calls the given function
+       at the given optimization level."""
+    setattr(
+        target_class,
+        'test_%s' % optimization_level.replace('-', '_').lower(),
+        lambda self: test_function(self, optimization_level))
+
+def define_perf_tests(target_class, test_function):
+    """Defines performance tests in the given class. Each test calls the given function."""
+    for optimization_level in ALL_OPTIMIZATION_LEVELS:
+        define_perf_test(target_class, test_function, optimization_level)
+
+DEFAULT_PERF_FILE_NAME = 'perf_data.txt'
+
+def write_perf_to_file(test_name, optimization_level, result, file_name=DEFAULT_PERF_FILE_NAME):
+    """Writes performance data to a file."""
+    with open(file_name, "a") as perf_file:
+        perf_file.write('%s:%s:%f\n' % (test_name, optimization_level, result))
+
+def parse_perf_data(file_name):
+    """Parses the performance data in the given file."""
+    results = {}
+    with open(file_name, 'r') as perf_file:
+        for line in perf_file:
+            test_name, optimization_level, result = line.strip().split(':')
+            if optimization_level not in results:
+                results[optimization_level] = []
+            results[optimization_level].append((test_name, result))
+    return sorted(results.items(), key=operator.itemgetter(1))
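A quick round-trip sketch of the on-disk format these two helpers share (the file name here is hypothetical):

    write_perf_to_file('fibonacci', 'baseline-jit', 0.421337, 'example_perf.txt')
    # example_perf.txt now ends with the line:  fibonacci:baseline-jit:0.421337
    print(parse_perf_data('example_perf.txt'))
    # -> [('baseline-jit', [('fibonacci', '0.421337')])]

Note that parse_perf_data keeps each measurement as a string; the chart generator formats it into the LaTeX output as-is.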

+ 2 - 2
scripts/run_local_modelverse.py

@@ -2,9 +2,9 @@ import subprocess
 import sys
 
 # sys.executable to use the same Python interpreter used to invoke this command
-if len(sys.argv) != 2:
+if len(sys.argv) < 2:
     sys.stderr.write("Expected different parameters!\n")
     sys.stderr.write("    %s port\n" % sys.argv[0])
 else:
     subprocess.check_call([sys.executable, "-m", "sccd.compiler.sccdc", "-p", "threads", "server.xml"], cwd="hybrid_server")
-    subprocess.call([sys.executable, "run_mvk_server.py", sys.argv[1]], cwd="hybrid_server")
+    subprocess.call([sys.executable, "run_mvk_server.py"] + sys.argv[1:], cwd="hybrid_server")
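With this change, anything after the port number is forwarded verbatim to run_mvk_server.py; that is how the performance harness selects an optimization level. A hypothetical direct call mirroring what utils.run_file now does (the port is arbitrary):

    # Start a local Modelverse on port 12345 with the baseline JIT kernel.
    utils.execute("run_local_modelverse", ["12345", "--kernel=baseline-jit"], wait=False)

Whether run_mvk_server.py itself understands the --kernel flag is outside the scope of this diff.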