
Merge branch 'master' into testing

Yentl Van Tendeloo, 8 years ago
parent
commit
13f53c98c7

+ 23 - 55
bootstrap/bootstrap.py

@@ -139,40 +139,31 @@ def bootstrap():
                 # Create all children of the root
                 for node in root:
                     f.write("Node %s()\n" % node)
-                    f.write("Edge _%s(root, %s)\n" % (node, node))
-                    f.write('Node __%s("%s")\n' % (node, node))
-                    f.write("Edge ___%s(_%s, __%s)\n" % (node, node, node))
+                    f.write("Dict (root, \"%s\", %s)\n" % (node, node))
 
                 def declare_primitive_class(primitive_class_name, primitive_decls):
                     f.write("Node %s()\n" % primitive_class_name)
-                    f.write("Edge _%s(__hierarchy, %s)\n" % (primitive_class_name, primitive_class_name))
-                    f.write('Node __%s("%s")\n' % (primitive_class_name, primitive_class_name))
-                    f.write("Edge ___%s(_%s, __%s)\n" % (primitive_class_name, primitive_class_name, primitive_class_name))
+                    f.write("Dict (__hierarchy, \"%s\", %s)\n" % (primitive_class_name, primitive_class_name))
 
                     # Define all primitive functions
                     for function, parameters in primitive_decls.iteritems():
                         f.write("Node _func_signature_%s()\n" % function)
                         f.write("Node _func_params_%s()\n" % function)
                         f.write("Node _func_body_%s()\n" % function)
-                        f.write("Edge _%s_%s(%s, _func_signature_%s)\n" % (primitive_class_name, function, primitive_class_name, function))
-                        f.write('Node _name_%s("%s")\n' % (function, function))
-                        f.write("Edge _%s_name_%s(_%s_%s, _name_%s)\n" % (primitive_class_name, function, primitive_class_name, function, function))
 
-                        f.write('Node _body_%s("body")\n' % function)
-                        f.write("Edge _signature_body_%s(_func_signature_%s, _func_body_%s)\n" % (function, function, function))
-                        f.write("Edge _signature_body_str_%s(_signature_body_%s, _body_%s)\n" % (function, function, function))
-
-                        f.write('Node _params_%s("params")\n' % function)
-                        f.write("Edge _signature_params_%s(_func_signature_%s, _func_params_%s)\n" % (function, function, function))
-                        f.write("Edge _signature_params_str_%s(_signature_params_%s, _params_%s)\n" % (function, function, function))
+                        f.write('Dict (%s, "%s", _func_signature_%s)\n' % (primitive_class_name, function, function))
+                        f.write('Dict (_func_signature_%s, "body", _func_body_%s)\n' % (function, function))
+                        f.write('Dict (_func_signature_%s, "params", _func_params_%s)\n' % (function, function))
 
                         parameter_names = "abcdefghijklmnopqrstuvwxyz"
                         for number, param in enumerate(parameters[1:]):
                             param_encoding = "%s_%s" % (function, parameter_names[number])
                             f.write("Node _func_params_%s()\n" % (param_encoding))
+
                             f.write('Node _name_%s("%s")\n' % (param_encoding, parameter_names[number]))
                             f.write("Edge _param_link_%s(_func_params_%s, _func_params_%s)\n" % (param_encoding, function, param_encoding))
                             f.write("Edge _param_link_str_%s(_param_link_%s, _name_%s)\n" % (param_encoding, param_encoding, param_encoding))
+
                             f.write('Node _name_str_%s("name")\n' % param_encoding)
                             f.write("Edge _param_name_%s(_func_params_%s, _name_%s)\n" % (param_encoding, param_encoding, param_encoding))
                             f.write("Edge _param_name_str_%s(_param_name_%s, _name_str_%s)\n" % (param_encoding, param_encoding, param_encoding))
@@ -185,26 +176,18 @@ def bootstrap():
 
                 for data in task_data:
                     f.write("Node task_%s()\n" % data)
-                    f.write('Node ___task_%s("%s")\n' % (data, data))
-                    f.write("Edge _task_%s(task_root, task_%s)\n" % (data, data))
-                    f.write("Edge __task_%s(_task_%s, ___task_%s)\n" % (data, data, data))
+                    f.write('Dict (task_root, "%s", task_%s)\n' % (data, data))
 
                 for data in task_frame:
                     f.write("Node task_%s()\n" % data)
-                    f.write('Node ___task_%s("%s")\n' % (data, data))
-                    f.write("Edge _task_%s(task_frame, task_%s)\n" % (data, data))
-                    f.write("Edge __task_%s(_task_%s, ___task_%s)\n" % (data, data, data))
+                    f.write('Dict (task_frame, "%s", task_%s)\n' % (data, data))
 
                 # Add last_input and last_output links
                 for data in ["input", "output"]:
-                    f.write('Node ___task_last_%s("last_%s")\n' % (data, data))
-                    f.write("Edge _task_last_%s(task_root, task_%s)\n" % (data, data))
-                    f.write("Edge __task_last_%s(_task_last_%s, ___task_last_%s)\n" % (data, data, data))
+                    f.write('Dict (task_root, "last_%s", task_%s)\n' % (data, data))
                     
                 # Bind task to the root
-                f.write('Node ___new_task("%s")\n' % task_manager)
-                f.write("Edge _new_task(root, task_root)\n")
-                f.write("Edge __new_task(_new_task, ___new_task)\n")
+                f.write('Dict (root, "%s", task_root)\n' % (task_manager))
 
                 def compile_code_AL(filename, target, prepend="", main=False, symbols=None):
                     import sys
@@ -216,9 +199,7 @@ def bootstrap():
                 # Create all library code
                 # But first create the structure to hold compiled data
                 f.write("Node __objects()\n", both=False)
-                f.write('Node __objects_name("objects")\n', both=False)
-                f.write("Edge __obj_link(__hierarchy, __objects)\n", both=False)
-                f.write("Edge _name_obj_link(__obj_link, __objects_name)\n", both=False)
+                f.write('Dict (__hierarchy, "objects", __objects)\n', both=False)
 
                 # Compile all files and add to structure manually
                 for bootstrap_file in bootstrap_files:
@@ -230,46 +211,33 @@ def bootstrap():
 
                     # Now link the code with the compilation manager structure
                     f.write("Node elem()\n", both=False)
-                    f.write('Node initializers("initializers")\n', both=False)
-                    f.write('Node hash("hash_md5")\n', both=False)
-                    f.write("Edge _(__objects, elem)\n", both=False)
-                    f.write('Node filename("%s")\n' % bootstrap_file, both=False)
-                    f.write("Edge _(_, filename)\n", both=False)
-                    f.write("Edge _(elem, %s_initial_IP)\n" % bootstrap_file, both=False)
-                    f.write("Edge _(_, initializers)\n", both=False)
+
+                    f.write('Dict (__objects, "%s", elem)\n' % bootstrap_file, both=False)
+                    f.write('Dict (elem, "initializers", %s_initial_IP)\n' % bootstrap_file, both=False)
+
                     md5 = hashlib.md5()
                     md5.update(open(bootstrap_file, 'r').read())
+
                     f.write('Node hash_value("%s")\n' % md5.hexdigest(), both=False)
-                    f.write("Edge _(elem, hash_value)\n", both=False)
-                    f.write("Edge _(_, hash)\n", both=False)
+                    f.write('Dict (elem, "hash_md5", hash_value)\n', both=False)
 
-                    f.write('Node symbols("symbols")\n', both=False)
                     f.write('Node __symbols()\n', both=False)
-                    f.write('Edge _(elem, __symbols)\n', both=False)
-                    f.write('Edge _(_, symbols)\n', both=False)
+                    f.write('Dict (elem, "symbols", __symbols)\n', both=False)
 
                     for k, v in symbols.items():
                         f.write('Node v(%s)\n' % v, both=False)
-                        f.write('Node k("%s")\n' % k, both=False)
-                        f.write('Edge _(__symbols, v)\n', both=False)
-                        f.write('Edge _(_, k)\n', both=False)
+                        f.write('Dict (__symbols, "%s", v)\n' % (k), both=False)
 
                 # Create code for initial task
                 print("[BOOT] task_manager")
-                f.write('Node _IP_str("IP")\n', both=False)
-                f.write("Edge _task_frame(task_frame, %s_initial_IP)\n" % initial_code_manager, both=False)
-                f.write("Edge __task_frame(_task_frame, _IP_str)\n", both=False)
+                f.write('Dict (task_frame, "IP", %s_initial_IP)\n' % (initial_code_manager), both=False)
 
                 f.write('Node __phase("init")\n', both=False)
-                f.write('Node __phase_str("phase")\n', both=False)
-                f.write("Edge _task_phase(task_frame, __phase)\n", both=False)
-                f.write("Edge __task_phase(_task_phase, __phase_str)\n", both=False)
+                f.write('Dict (task_frame, "phase", __phase)\n', both=False)
 
                 # Create code for new tasks to start at
                 print("[BOOT] new_task")
-                f.write('Node __IP_str("__IP")\n', both=False)
-                f.write("Edge _task_IP(__hierarchy, %s_initial_IP)\n" % initial_code_task, both=False)
-                f.write("Edge __task_IP(_task_IP, __IP_str)\n", both=False)
+                f.write('Dict (__hierarchy, "__IP", %s_initial_IP)\n' % initial_code_task, both=False)
     except:
         os.remove("bootstrap/bootstrap.m.gz")
         os.remove("bootstrap/minimal.m.gz")

+ 9 - 4
bootstrap/conformance_scd.alc

@@ -115,6 +115,11 @@ String function conformance_scd(model : Element):
 	Element spi_cache
 	Element constraint_function
 
+	Element reverse_m
+	Element reverse_mm
+	reverse_m = make_reverse_dictionary(model["model"])
+	reverse_mm = make_reverse_dictionary(model["metamodel"]["model"])
+
 	spo_cache = create_node()
 	spi_cache = create_node()
 
@@ -144,10 +149,10 @@ String function conformance_scd(model : Element):
 				return "Type of element not in specified metamodel: " + model_info(model, model_name)!
 
 			if (is_edge(element)):
-				src_model = reverseKeyLookup(model["model"], read_edge_src(element))
-				dst_model = reverseKeyLookup(model["model"], read_edge_dst(element))
-				src_metamodel = reverseKeyLookup(metamodel["model"], read_edge_src(metamodel["model"][typing[model_name]]))
-				dst_metamodel = reverseKeyLookup(metamodel["model"], read_edge_dst(metamodel["model"][typing[model_name]]))
+				src_model = reverse_m[cast_id2s(read_edge_src(element))]
+				dst_model = reverse_m[cast_id2s(read_edge_dst(element))]
+				src_metamodel = reverse_mm[cast_id2s(read_edge_src(metamodel["model"][typing[model_name]]))]
+				dst_metamodel = reverse_mm[cast_id2s(read_edge_dst(metamodel["model"][typing[model_name]]))]
 
 				if (bool_not(is_nominal_instance(model, src_model, src_metamodel))):
 					log("got: " + src_model)

+ 24 - 21
bootstrap/model_management.alc

@@ -16,6 +16,7 @@ Element function model_fuse(models : Element):
 	String key
 	Element selected_MM
 	String type
+	Element reverse
 
 	// Read out some data first
 	tagged_model = set_pop(models)
@@ -28,6 +29,8 @@ Element function model_fuse(models : Element):
 		model_name = string_join(list_read(tagged_model, 0), "/")
 		model = list_read(tagged_model, 1)
 
+		reverse = make_reverse_dictionary(model["model"])
+
 		// Add all elements from 'model', but prepend it with the 'model_name'
 		keys = dict_keys(model["model"])
 		second_keys = create_node()
@@ -39,8 +42,8 @@ Element function model_fuse(models : Element):
 			if (is_edge(model["model"][key])):
 				String src
 				String dst
-				src = model_name + reverseKeyLookup(model["model"], read_edge_src(model["model"][key]))
-				dst = model_name + reverseKeyLookup(model["model"], read_edge_dst(model["model"][key]))
+				src = string_join(model_name, reverse[cast_id2s(read_edge_src(model["model"][key]))])
+				dst = string_join(model_name, reverse[cast_id2s(read_edge_dst(model["model"][key]))])
 				if (bool_and(dict_in(new_model["model"], src), dict_in(new_model["model"], dst))):
 					instantiate_link(new_model, type, model_name + key, src, dst)
 				else:
@@ -63,6 +66,9 @@ Element function model_copy(src_model : Element):
 	Element second_keys
 	String type
 
+	Element reverse
+	reverse = make_reverse_dictionary(src_model["model"])
+
 	dst_model = instantiate_model(src_model["metamodel"])
 	keys = dict_keys(src_model["model"])
 	second_keys = create_node()
@@ -75,8 +81,9 @@ Element function model_copy(src_model : Element):
 			String src
 			String dst
 
-			src = reverseKeyLookup(src_model["model"], read_edge_src(src_model["model"][name]))
-			dst = reverseKeyLookup(src_model["model"], read_edge_dst(src_model["model"][name]))
+			src = reverse[cast_id2s(read_edge_src(src_model["model"][name]))]
+			dst = reverse[cast_id2s(read_edge_dst(src_model["model"][name]))]
+
 			type = read_type(src_model, name)
 
 			if (bool_and(dict_in(dst_model["model"], src), dict_in(dst_model["model"], dst))):
@@ -141,18 +148,19 @@ Void function model_join(dst_model : Element, src_model : Element, retyping_key
 	String type
 	String src
 	String dst
-	Element cached_src_model_model
 	Element elem
-	cached_src_model_model = src_model["model"]
 
 	mapping = create_node()
 
 	second_keys = create_node()
 
+	Element reverse
+	reverse = make_reverse_dictionary(src_model["model"])
+
 	keys = dict_keys(src_model["model"])
 	while (read_nr_out(keys) > 0):
 		name = set_pop(keys)
-		elem = cached_src_model_model[name]
+		elem = src_model["model"][name]
 		type = read_type(src_model, name)
 
 		if (is_edge(elem)):
@@ -160,8 +168,8 @@ Void function model_join(dst_model : Element, src_model : Element, retyping_key
 			String src
 			String dst
 
-			src = reverseKeyLookup(cached_src_model_model, read_edge_src(elem))
-			dst = reverseKeyLookup(cached_src_model_model, read_edge_dst(elem))
+			src = reverse[cast_id2s(read_edge_src(elem))]
+			dst = reverse[cast_id2s(read_edge_dst(elem))]
 
 			if (bool_and(dict_in(mapping, src), dict_in(mapping, dst))):
 				// All present, so create the link between them
@@ -197,19 +205,20 @@ Element function model_split(src_model : Element, target_metamodel : Element, re
 	String dst
 	Integer length
 	String new_type
-	Element cached_src_model_model
 	Element elem
-	cached_src_model_model = src_model["model"]
 
 	mapping = create_node()
 	length = string_len(retyping_key)
 
-	keys = dict_keys(src_model["model"])
 	second_keys = create_node()
 
+	Element reverse
+	reverse = make_reverse_dictionary(src_model["model"])
+
+	keys = dict_keys(src_model["model"])
 	while (read_nr_out(keys) > 0):
 		name = set_pop(keys)
-		elem = cached_src_model_model[name]
+		elem = src_model["model"][name]
 
 		type = read_type(src_model, name)
 		if (string_startswith(type, retyping_key)):
@@ -219,8 +228,8 @@ Element function model_split(src_model : Element, target_metamodel : Element, re
 				String src
 				String dst
 
-				src = reverseKeyLookup(cached_src_model_model, read_edge_src(elem))
-				dst = reverseKeyLookup(cached_src_model_model, read_edge_dst(elem))
+				src = reverse[cast_id2s(read_edge_src(elem))]
+				dst = reverse[cast_id2s(read_edge_dst(elem))]
 
 				if (bool_and(dict_in(mapping, src), dict_in(mapping, dst))):
 					// All present, so create the link between them
@@ -229,12 +238,6 @@ Element function model_split(src_model : Element, target_metamodel : Element, re
 					// Source/target not in the queue, but we need it to split!
 					// This is an error as it indicates problems with links crossing different formalisms
 					log("ERROR: source/target of link to be included is not included!")
-					log("Source: " + src)
-					log("  type: " + read_type(src_model, src))
-					log("Destination: " + dst)
-					log("  type: " + read_type(src_model, dst))
-					log("For link: " + name)
-					log("  type: " + type)
 					return create_node()!
 				else:
 					// Still source or destination in the queue, so we wait for that

+ 13 - 20
bootstrap/modelling.alc

@@ -154,11 +154,9 @@ Element function get_subclasses(model : Element, name : String):
 	Element edge
 	String elem
 	Element nodes
-	Element inheritance
 
 	nodes = create_node()
 	set_add(nodes, name)
-	inheritance = "Inheritance"
 
 	// Initialize empty set
 	result = create_node()
@@ -173,7 +171,7 @@ Element function get_subclasses(model : Element, name : String):
 			j = 0
 			while (j < num_edges):
 				edge = read_in(model["model"][elem], j)
-				if (value_eq(model["type_mapping"][reverseKeyLookup(model["model"], edge)], inheritance)):
+				if (value_eq(model["type_mapping"][reverseKeyLookup(model["model"], edge)], "Inheritance")):
 					set_add(nodes, reverseKeyLookup(model["model"], read_edge_src(edge)))
 				j = j + 1
 
@@ -187,11 +185,9 @@ Element function get_superclasses(model : Element, name : String):
 	Element edge
 	String elem
 	Element nodes
-	Element inheritance
 
 	nodes = create_node()
 	set_add(nodes, name)
-	inheritance = "Inheritance"
 
 	// Initialize empty set
 	result = create_node()
@@ -206,7 +202,7 @@ Element function get_superclasses(model : Element, name : String):
 			j = 0
 			while (j < num_edges):
 				edge = read_out(model["model"][elem], j)
-				if (value_eq(model["type_mapping"][reverseKeyLookup(model["model"], edge)], inheritance)):
+				if (value_eq(model["type_mapping"][reverseKeyLookup(model["model"], edge)], "Inheritance")):
 					set_add(nodes, reverseKeyLookup(model["model"], read_edge_dst(edge)))
 				j = j + 1
 
@@ -376,15 +372,15 @@ String function model_define_attribute(model : Element, elem : String, name : St
 	return edge_name!
 
 Element function read_attribute(model : Element, element : String, attribute : String):
-	Integer i
-	Integer count
-	Element edge
-	String edge_type_name
-	Element elem
-	Element typing
-	Element name
-
 	if (dict_in(model["model"], element)):
+		Integer i
+		Integer count
+		Element edge
+		Element edge_type
+		Element elem
+		Element typing
+		Element name
+
 		elem = model["model"][element]
 		typing = model["type_mapping"]
 		count = read_nr_out(elem)
@@ -394,8 +390,8 @@ Element function read_attribute(model : Element, element : String, attribute : S
 			edge = read_out(elem, i)
 			name = reverseKeyLookup(model["model"], edge)
 			if (dict_in(typing, name)):
-				edge_type_name = typing[name]
-				if (edge_type_name == reverseKeyLookup(model["metamodel"]["model"], dict_read_edge(read_edge_src(model["metamodel"]["model"][edge_type_name]), attribute))):
+				edge_type = model["metamodel"]["model"][typing[name]]
+				if (element_eq(edge_type, dict_read_edge(read_edge_src(edge_type), attribute))):
 					return read_edge_dst(edge)!
 			i = i + 1
 
@@ -466,10 +462,7 @@ String function add_AL(model : Element, element : Element):
 	String elem_name
 
 	todo = create_node()
-	node = create_node()
-	list_append(node, element)
-	list_append(node, "funcdef")
-	set_add(todo, node)
+	set_add(todo, create_tuple(element, "funcdef"))
 
 	while (0 < dict_len(todo)):
 		work_node = set_pop(todo)

+ 18 - 1
bootstrap/primitives.alc

@@ -320,7 +320,7 @@ Element function dict_copy(d : Element):
 	keys = dict_keys(d)
 	while (read_nr_out(keys) > 0):
 		key = set_pop(keys)
-		dict_add(result, key, dict_read_node(d, key))
+		dict_add_fast(result, key, dict_read_node(d, key))
 
 	return result!
 
@@ -349,3 +349,20 @@ Element function set_merge(a : Element, b : Element):
 	while (read_nr_out(b) > 0):
 		set_add(a, set_pop(b))
 	return a!
+
+Element function make_reverse_dictionary(dict : Element):
+	Element keys
+	Element reverse
+	String key
+	String value
+
+	reverse = create_node()
+	keys = dict_keys(dict)
+	while (read_nr_out(keys) > 0):
+		key = set_pop(keys)
+		value = cast_id2s(dict[key])
+		if (dict_in(reverse, value)):
+			dict_delete(reverse, value)
+		dict_add(reverse, value, key)
+
+	return reverse!
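
A rough Python analogue of make_reverse_dictionary (illustrative only, not kernel code): it maps each value's identity to its key once, so callers can replace the linear reverseKeyLookup scan with a constant-time lookup keyed on the result of cast_id2s.

    def make_reverse_dictionary(d):
        reverse = {}
        for key, value in d.items():
            # cast_id2s(value) is approximated here by str(id(value))
            reverse[str(id(value))] = key
        return reverse

    # callers then use reverse[str(id(node))] where reverseKeyLookup(d, node) was used before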

+ 5 - 5
bootstrap/ramify.alc

@@ -9,12 +9,12 @@ Element function ramify(model : Element):
 	// Create new model structure
 	Element new_model
 	new_model = create_node()
-	dict_add(new_model, "model", create_node())
-	dict_add(new_model, "type_mapping", create_node())
-	dict_add(new_model, "metamodel", model["metamodel"])
+	dict_add_fast(new_model, "model", create_node())
+	dict_add_fast(new_model, "type_mapping", create_node())
+	dict_add_fast(new_model, "metamodel", model["metamodel"])
 
-	dict_add(new_model, "source", model)
-	dict_add(new_model, "target", model)
+	dict_add_fast(new_model, "source", model)
+	dict_add_fast(new_model, "target", model)
 
 	// Get local variables for parts
 	Element old_m

+ 12 - 12
bootstrap/task_manager.alc

@@ -19,18 +19,18 @@ Void function task_management():
 			task_frame = create_node()
 			output_value = create_node()
 			input_value = create_node()
-			dict_add(task_root, "frame", task_frame)
-			dict_add(task_root, "globals", create_node())
-			dict_add(task_root, "output", output_value)
-			dict_add(task_root, "last_output", output_value)
-			dict_add(task_root, "input", input_value)
-			dict_add(task_root, "last_input", input_value)
-			dict_add(task_frame, "evalstack", create_node())
-			dict_add(task_frame, "returnvalue", create_node())
-			dict_add(task_frame, "phase", "init")
-			dict_add(task_frame, "IP", dict_read(dict_read(read_root(), "__hierarchy"), "__IP"))
-			dict_add(task_frame, "symbols", create_node())
+			dict_add_fast(task_root, "frame", task_frame)
+			dict_add_fast(task_root, "globals", create_node())
+			dict_add_fast(task_root, "output", output_value)
+			dict_add_fast(task_root, "last_output", output_value)
+			dict_add_fast(task_root, "input", input_value)
+			dict_add_fast(task_root, "last_input", input_value)
+			dict_add_fast(task_frame, "evalstack", create_node())
+			dict_add_fast(task_frame, "returnvalue", create_node())
+			dict_add_fast(task_frame, "phase", "init")
+			dict_add_fast(task_frame, "IP", dict_read(dict_read(read_root(), "__hierarchy"), "__IP"))
+			dict_add_fast(task_frame, "symbols", create_node())
 
 			//Add this only at the end, as otherwise the task will already be detected
-			dict_add(read_root(), taskname, task_root)
+			dict_add_fast(read_root(), taskname, task_root)
 	return!

+ 17 - 10
bootstrap/transform.alc

@@ -17,6 +17,9 @@ Element function make_matching_schedule(schedule_model : Element, LHS : String,
 	String next
 	Element tmp
 
+	Element reverse
+	reverse = make_reverse_dictionary(schedule_model["model"])
+
 	// Initialize
 	schedule = create_node()
 	workset = create_node()
@@ -52,22 +55,22 @@ Element function make_matching_schedule(schedule_model : Element, LHS : String,
 					// If it is an edge, we should also add the target and source
 					if (is_edge(schedule_model["model"][next])):
 						// Add the target/source to the schedule
-						set_add(workset, reverseKeyLookup(schedule_model["model"], read_edge_src(schedule_model["model"][next])))
-						set_add(workset, reverseKeyLookup(schedule_model["model"], read_edge_dst(schedule_model["model"][next])))
+						set_add(workset, reverse[cast_id2s(read_edge_src(schedule_model["model"][next]))])
+						set_add(workset, reverse[cast_id2s(read_edge_dst(schedule_model["model"][next]))])
 
 					// Also add all outgoing links
 					counter = read_nr_out(schedule_model["model"][next])
 					while (counter > 0):
 						counter = counter - 1
 						if (set_in_node(schedule_model["model"], read_out(schedule_model["model"][next], counter))):
-							set_add(workset, reverseKeyLookup(schedule_model["model"], read_out(schedule_model["model"][next], counter)))
+							set_add(workset, reverse[cast_id2s(read_out(schedule_model["model"][next], counter))])
 
 					// And incoming links
 					counter = read_nr_in(schedule_model["model"][next])
 					while (counter > 0):
 						counter = counter - 1
 						if (set_in_node(schedule_model["model"], read_in(schedule_model["model"][next], counter))):
-							set_add(workset, reverseKeyLookup(schedule_model["model"], read_in(schedule_model["model"][next], counter)))
+							set_add(workset, reverse[cast_id2s(read_in(schedule_model["model"][next], counter))])
 
 	return schedule!
 
@@ -287,7 +290,7 @@ Element function match(host_model : Element, schedule_model : Element, LHS : Str
 	set_add(mappings, initial_mapping)
 	while (read_nr_out(schedule) > 0):
 		current_element = list_pop(schedule, read_nr_out(schedule) - 1)
-		log("Binding element with label " + cast_v2s(read_attribute(schedule_model, current_element, "label")))
+		//log("Binding element with label " + cast_v2s(read_attribute(schedule_model, current_element, "label")))
 		new_mappings = create_node()
 
 		while (read_nr_out(mappings) > 0):
@@ -301,7 +304,7 @@ Element function match(host_model : Element, schedule_model : Element, LHS : Str
 				set_add(new_mappings, new_map)
 
 		mappings = new_mappings
-		log("Remaining options: " + cast_v2s(read_nr_out(mappings)))
+		//log("Remaining options: " + cast_v2s(read_nr_out(mappings)))
 
 		if (read_nr_out(mappings) == 0):
 			// Stop because we have no more options remaining!
@@ -348,6 +351,9 @@ Void function rewrite(host_model : Element, schedule_model : Element, RHS : Stri
 	Element action
 	Element original_RHS_labels
 
+	Element reverse
+	reverse = make_reverse_dictionary(schedule_model["model"])
+
 	LHS_labels = dict_keys(mapping)
 	RHS_labels = create_node()
 	RHS_map = create_node()
@@ -376,8 +382,9 @@ Void function rewrite(host_model : Element, schedule_model : Element, RHS : Stri
 		label = list_pop(labels_to_add, read_nr_out(labels_to_add) - 1)
 		if (is_edge(schedule_model["model"][RHS_map[label]])):
 			// Edge
-			src = read_attribute(schedule_model, reverseKeyLookup(schedule_model["model"], read_edge_src(schedule_model["model"][RHS_map[label]])), "label")
-			dst = read_attribute(schedule_model, reverseKeyLookup(schedule_model["model"], read_edge_dst(schedule_model["model"][RHS_map[label]])), "label")
+			src = read_attribute(schedule_model, reverse[cast_id2s(read_edge_src(schedule_model["model"][RHS_map[label]]))], "label")
+			dst = read_attribute(schedule_model, reverse[cast_id2s(read_edge_dst(schedule_model["model"][RHS_map[label]]))], "label")
+
 			// First check whether both source and destination are already created
 			if (bool_and(dict_in(new_mapping, src), dict_in(new_mapping, dst))):
 				// Both are present, so we can make the link
@@ -480,7 +487,7 @@ Boolean function transform_composite(host_model : Element, schedule_model : Elem
 
 	current = set_pop(allAssociationDestinations(schedule_model, composite, "Initial"))
 	while (is_nominal_instance(schedule_model, current, "Rule")):
-		log("Executing " + current)
+		//log("Executing " + current)
 		// Still a rule that we must execute
 		typename = read_type(schedule_model, current)
 		if (typename == "Atomic"):
@@ -533,7 +540,7 @@ Boolean function transform_forall(host_model : Element, schedule_model : Element
 	else:
 		result = False
 
-	log("Matches in forall: " + cast_v2s(read_nr_out(mappings)))
+	//log("Matches in forall: " + cast_v2s(read_nr_out(mappings)))
 	while (read_nr_out(mappings) > 0):
 		mapping = set_pop(mappings)
 		// TODO check if there are actually no deletions happening in the meantime of other matched elements...

+ 0 - 2
integration/test_powerwindow.py

@@ -35,7 +35,6 @@ all_files = [   "core/mini_modify.alc",
             ]
 
 class TestPowerWindow(unittest.TestCase):
-    @slow
     def test_process_powerwindow_fast(self):
         self.assertTrue(run_file(all_files,
             [ "root", "root", "root", 
@@ -1414,4 +1413,3 @@ class TestPowerWindow(unittest.TestCase):
                 # Finished, so we go back to the start
                 "Ready for command...",
             ]))
-

+ 8 - 5
integration/utils.py

@@ -10,7 +10,6 @@ import urllib2
 import subprocess
 import signal
 import random
-import pytest
 
 sys.path.append("interface/HUTN")
 sys.path.append("scripts")
@@ -22,10 +21,14 @@ parallel_push = True
 INIT_TIMEOUT = 30
 TIMEOUT = 2000
 
-slow = pytest.mark.skipif(
-    not pytest.config.getoption("--runslow"),
-        reason="need --runslow option to run"
-)
+try:
+    import pytest
+    slow = pytest.mark.skipif(
+        not pytest.config.getoption("--runslow"),
+            reason="need --runslow option to run"
+    )
+except:
+    slow = lambda i:i
 
 ports = set()
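
A small note on the fallback above: when pytest cannot be imported, slow becomes an identity decorator, so @slow-marked tests simply run unconditionally. A hypothetical illustration:

    @slow
    def test_something():
        pass   # with the fallback, slow(test_something) is test_something, so nothing is skipped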
 

+ 4 - 4
interface/HUTN/hutn_compiler/bootstrap_visitor.py

@@ -51,11 +51,11 @@ class BootstrapVisitor(PrimitivesVisitor):
 
                 source = self.rename(source)
                 target = self.rename(target)
-                linkname = "%s_%s_%s" % (source, link_id, target)
                 link_id += 1
-                output.append("Edge _%s_0(%s, %s)\n" % (linkname, source, target))
-                output.append("Node _%s_2(%s)\n" % (linkname, value))
-                output.append("Edge _%s_1(_%s_0, _%s_2)\n" % (linkname, linkname, linkname))
+                output.append("Dict _%s(%s, %s, %s)\n" % (link_id, source, value, target))
+                #output.append("Edge _%s_0(%s, %s)\n" % (linkname, source, target))
+                #output.append("Node _%s_2(%s)\n" % (linkname, value))
+                #output.append("Edge _%s_1(_%s_0, _%s_2)\n" % (linkname, linkname, linkname))
             elif t == "E":
                 name, source, target = data
                 source = source if self.first != source else "initial_IP"

+ 1 - 4
interface/HUTN/hutn_compiler/compiler.py

@@ -25,7 +25,7 @@ def do_parse(inputfile, grammarfile):
                 new_grammar = False
             else:
                 # Will be catched immediately
-                raise Exception("Pickle is invalid!")
+                raise Exception()
         except:
             result = parser = Parser(Grammar(), hide_implicit = True).parse(read(grammarfile))
             if result['status'] != Parser.Constants.Success:
@@ -46,9 +46,6 @@ def do_parse(inputfile, grammarfile):
 
     picklefile = inputfile + ".pickle"
     try:
-        if new_grammar:
-            # Stop anyway, as the grammar is new
-            raise Exception()
         if os.path.getmtime(picklefile) > os.path.getmtime(inputfile):
             # Pickle is more recent than inputfile, so use it
             result = pickle.load(open(picklefile, 'rb'))

+ 1 - 0
interface/HUTN/includes/primitives.alh

@@ -110,3 +110,4 @@ Element function set_to_list(s : Element)
 Element function create_tuple(a : Element, b : Element)
 Void function dict_overwrite(a : Element, b : Element, c : Element)
 Element function set_merge(sa : Element, sb : Element)
+Element function make_reverse_dictionary(dict : Element)

+ 35 - 0
kernel/modelverse_kernel/compiled.py

@@ -110,3 +110,38 @@ def has_value(a, **remainder):
     else:
         result, = yield [("CNV", [True])]
     raise PrimitiveFinished(result)
+
+def make_reverse_dictionary(a, **remainder):
+    reverse, = yield [("CN", [])]
+    key_nodes, = yield [("RDK", [a])]
+    values = yield [("RDN", [a, i]) for i in key_nodes]
+    yield [("CD", [reverse, str(v), k]) for k, v in zip(key_nodes, values)]
+    raise PrimitiveFinished(reverse)
+
+def dict_eq(a, b, **remainder):
+    key_nodes, = yield [("RDK", [a])]
+    key_values = yield [("RV", [i]) for i in key_nodes]
+    values = yield [("RD", [a, i]) for i in key_values]
+    values = yield [("RV", [i]) for i in values]
+    a_dict = dict(zip(key_values, values))
+
+    key_nodes, = yield [("RDK", [b])]
+    key_values = yield [("RV", [i]) for i in key_nodes]
+    values = yield [("RD", [b, i]) for i in key_values]
+    values = yield [("RV", [i]) for i in values]
+    b_dict = dict(zip(key_values, values))
+
+    result, = yield [("CNV", [a_dict == b_dict])]
+    raise PrimitiveFinished(result)
+
+def string_substr(a, b, c, **remainder):
+    a_val, b_val, c_val = yield [("RV", [a]),
+                                 ("RV", [b]),
+                                 ("RV", [c])]
+    try:
+        new_value = a_val[b_val:c_val]
+    except:
+        new_value = ""
+    
+    result, = yield [("CNV", [new_value])]
+    raise PrimitiveFinished(result)
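
A hedged sketch of how a generator-style primitive like the ones above is driven: it yields batches of (opcode, args) requests, receives one reply per request, and signals its result by raising PrimitiveFinished. The driver and the `state` callable below are illustrative assumptions, not the kernel's actual request handler:

    from modelverse_kernel.primitives import PrimitiveFinished

    def run_primitive(gen, state):
        replies = None
        while True:
            try:
                requests = gen.send(replies)   # first send must be None
            except PrimitiveFinished as finished:
                # attribute name assumed; the real exception stores its constructor argument
                return finished.value
            replies = [state(opcode, args) for opcode, args in requests]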

+ 3 - 2
kernel/modelverse_kernel/primitives.py

@@ -1,3 +1,5 @@
+import time as python_time
+
 class PrimitiveFinished(Exception):
     """Exception to indicate the result value of a primitive, as a return cannot be used."""
     def __init__(self, value):
@@ -578,8 +580,7 @@ def read_taskroot(task_root, **remainder):
     raise PrimitiveFinished(task_root)
 
 def time(**remainder):
-    import time
-    a, = yield [("CNV", [time.time()])]
+    a, = yield [("CNV", [python_time.time()])]
     raise PrimitiveFinished(a)
 
 def hash(a, **remainder):

+ 18 - 25
kernel/modelverse_kernel/request_handler.py

@@ -15,30 +15,32 @@ class GeneratorStackEntry(object):
         self.function_origin = None
         self.pending_requests = None
         self.finished_requests = True
-        self.replies = []
-        self.has_reply = False
+        self.replies = None
 
     def append_reply(self, new_reply):
         """Appends a reply to the this entry's list of pending replies."""
-        self.replies.append(new_reply)
-        self.has_reply = True
+        if self.replies is None:
+            self.replies = [new_reply]
+        else:
+            self.replies.append(new_reply)
 
     def extend_replies(self, new_replies):
         """Appends a list of replies to this entry's list of pending replies."""
         if new_replies is not None:
-            self.replies.extend(new_replies)
-            self.has_reply = True
+            if self.replies is None:
+                self.replies = new_replies
+            else:
+                self.replies.extend(new_replies)
 
     def step(self):
         """Performs a single step: accumulated replies are fed to the generator,
            which then produces requests."""
         # Send the replies to the generator, and ask for new requests.
-        self.pending_requests = self.generator.send(self.replies if self.has_reply else None)
+        self.pending_requests = self.generator.send(self.replies)
 
         # Reset some data structures.
         self.finished_requests = False
-        self.replies = []
-        self.has_reply = False
+        self.replies = None
 
 def format_stack_trace(stack_trace):
     """Formats a list of (function name, debug info, origin) triples."""
@@ -95,17 +97,13 @@ class RequestHandler(object):
             # Silence pylint's warning about catching Exception.
             # pylint: disable=I0011,W0703
             try:
-                if self.has_pending_requests():
-                    try:
-                        # Try to pop a request for the modelverse state.
-                        return self.pop_requests()
-                    except KnownRequestHandled:
-                        # Carry on.
-                        pass
-
-                if not self.has_pending_requests():
-                    # Perform a single generator step.
-                    self.step()
+                while self.generator_stack[-1].finished_requests:
+                    self.generator_stack[-1].step()
+                else:
+                    return self.pop_requests()
+
+            except KnownRequestHandled:
+                pass
             except StopIteration:
                 # Done, so remove the generator
                 self.pop_generator()
@@ -174,11 +172,6 @@ class RequestHandler(object):
         """Appends a list of replies to the top-of-stack generator's list of pending replies."""
         self.generator_stack[-1].extend_replies(new_replies)
 
-    def step(self):
-        """Performs a single step: accumulated replies are fed to the generator,
-           which then produces requests."""
-        self.generator_stack[-1].step()
-
     def handle_exception(self, exception):
         """Handles the given exception. A Boolean is returned that tells if
            the exception was handled."""
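
A brief clarification of the while/else used in the rewritten handle_request loop above: in Python, the else branch of a while runs once the loop condition becomes false (i.e. the loop was not left via break), so pop_requests() is called as soon as the top generator has unfinished requests. A tiny standalone illustration:

    # prints "loop", "loop", then "done": the else fires once the condition fails
    n = 2
    while n > 0:
        print("loop")
        n -= 1
    else:
        print("done")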

+ 0 - 186
models/environment_to_EPN.alc

@@ -1,186 +0,0 @@
-// Pseudo-code
-//    TODO: add link to the ports of the boundary
-//    TODO: update MM to reflect the new changes to the structure (everything is an Activity with a Next link, and everything has a duration)
-
-//  init_place = new_place()
-//	branches = [(init_place, [(0, topmost_model)]]
-//	while branches:
-//		prev, options = branches.pop()
-//		nr = find_min_time(options)
-//		time, cur = options.pop(nr)
-//
-//		if type(cur) == "Event":
-//			// Just add the current node and augment the duration for the next one
-//			prev_model = new_element(cur)
-//			if (cur.next != None):
-//				options.append((time + cur.next.duration, cur.next))
-//				branches.append((prev_model, options))
-//			else:
-//				// recurse upwards until we can follow a link
-//				elem = containee(cur)
-//				while elem.next_activity == None and containee(elem) != None:
-//					elem = containee(elem)
-//				if containee(elem) == None:
-//					// finished this branch
-//					continue!
-//				else:
-//					cur = elem.next_activity
-//					options.append((time + elem.duration, cur.next))
-//					branches.append((prev_model, options))
-//		elif type(cur) == "Sequence":
-//			options.append((time + first.duration, cur.first))
-//			branches.append((prev_model, options))
-//		elif type(cur) == "Parallel":
-//			// Add all starts of the parallel as potential next one
-//			// But keep the previous source, as we only expanded
-//			for next in cur.start_nodes:
-//				options.append((time + next.duration, next))
-//			branches.append((prev, options))
-//		elif type(cur) == "Alternative":
-//			// Expand into new branches, but keep the options as is (add the individual options though)
-//			for next in cur.start_nodes:
-//				options.append((time + next.duration, next))
-//				// Don't rewrite the source, as we effectively want to branch out from this node
-//				branches.append(prev, options)
-
-include "primitives.alh"
-include "modelling.alh"
-include "object_operations.alh"
-
-Element function env_to_EPN(params : Element, output_mms : Element):
-	Element result
-	Element out_model
-	Element in_model
-	String init_place
-	String current_activity
-	Element branches
-	Element options
-	Element branch
-	String previous_place
-	Integer i
-	Integer cnt
-	Integer min_time
-	Integer index_min_time
-	Element option
-	Integer current_time
-	String new_transition
-	String new_model
-	Element containers
-	Element new_options
-	Element inner_elements
-	Element entry
-	String type
-	String prev_model
-	String element
-
-	result = create_node()
-	out_model = instantiate_model(output_mms["Encapsulated_PetriNet"])
-	in_model = params["environment"]
-
-	// Create the initial place
-	init_place = instantiate_node(out_model, "Place", "")
-	instantiate_attribute(out_model, init_place, "tokens", 1)
-
-	// Set current element to the TopActivity, which will be expanded
-	current_activity = set_pop(allInstances(in_model, "TopActivity"))
-
-	// Initialize the data structure with the current element and initial place
-	branches = create_node()
-	options = create_node()
-	list_append(options, create_tuple(0, current_activity))
-	set_add(branches, create_tuple(init_place, options))
-
-	// Keep going as long as there are branches to resolve
-	while (read_nr_out(branches) > 0):
-		// Still a branch, so pick one at random
-		branch = set_pop(branches)
-		previous_place = branch[0]
-		options = branch[1]
-
-		// Find the index of the option with the lowest time (first element of tuple)
-		i = 0
-		cnt = list_len(options)
-		min_time = 9999999
-		index_min_time = -1
-		while (i < cnt):
-			entry = list_read(options, i)
-			if (integer_lt(entry[0], min_time)):
-				min_time = entry[0]
-				index_min_time = i
-			i = i + 1
-
-		// Pop the minimal option
-		option = list_pop(options, index_min_time)
-		current_time = option[0]
-		current_activity = option[1]
-
-		// Figure out the type
-		type = read_type(in_model, current_activity)
-
-		// Now branch based on the type
-		if (type == "Event"):
-			// Process an event: update the PN and go to the next activity
-			new_transition = instantiate_node(out_model, "Transition", "")
-			new_model = instantiate_node(out_model, "Place", "")
-			instantiate_link(out_model, "P2T", "", prev_model, new_transition)
-			instantiate_link(out_model, "T2P", "", new_transition, new_model)
-
-			// Check if there is a Next to this Event, meaning another event
-			if (read_nr_out(allOutgoingAssociationInstances(in_model, current_activity, "Next")) > 0):
-				// We have a Next, so just push that next event on the options
-				current_activity = set_pop(allAssociationDestinations(in_model, current_activity, "Next"))
-				list_append(options, create_tuple(integer_addition(current_time, read_attribute(in_model, current_activity, "duration")), current_activity))
-				set_add(branches, create_tuple(new_model, options))
-			else:
-				// No Next in this node, so we recurse up until one of these elements does have a next (or we reach the top)
-				while (read_nr_out(allOutgoingAssociationInstances(in_model, current_activity, "Next")) == 0):
-					// Recurse up
-					containers = allAssociationOrigins(in_model, current_activity, "Contains")
-					if (read_nr_out(containers) == 1):
-						current_activity = set_pop(containers)
-					elif (read_nr_out(containers) == 0):
-						// No more containers, so at top element
-						break!
-				if (read_nr_out(containers) == 0):
-					// Nothing left to do, so clear up this branch, but continue with the others
-					continue!
-				else:
-					// Found a node with a Next link: we follow it
-					current_activity = set_pop(allAssociationDestinations(in_model, current_activity, "Next"))
-					list_append(options, create_tuple(integer_addition(current_time, read_attribute(in_model, current_activity, "duration")), current_activity))
-					set_add(branches, create_tuple(new_model, options))
-
-		elif (type == "Sequence"):
-			// Process a sequence: just move the current option to the enclosing activity
-			inner_elements = allAssociationDestinations(in_model, current_activity, "Contains")
-			while (read_nr_out(inner_elements) > 0):
-				element = set_pop(inner_elements)
-				if (read_nr_out(allIncomingAssociationInstances(in_model, element, "Next")) == 0):
-					current_activity = element
-					break!
-			// current_activity now contains the inner element to execute
-			list_append(options, create_tuple(integer_addition(current_time, read_attribute(in_model, current_activity, "duration")), current_activity))
-			// keep the current branch alive, as everything is updated by reference
-			set_add(branches, branch)
-
-		elif (type == "Parallel"):
-			// Process a parallel: create new options for each containing element
-			inner_elements = allAssociationDestinations(in_model, current_activity, "Contains")
-			while (read_nr_out(inner_elements) > 0):
-				current_activity = set_pop(inner_elements)
-				// current_activity now contains the inner element to execute in parallel (an option)
-				list_append(options, create_tuple(integer_addition(current_time, read_attribute(in_model, current_activity, "duration")), current_activity))
-			// keep the current branch alive, as everything is updated by reference
-			set_add(branches, branch)
-
-		elif (type == "Alternative"):
-			inner_elements = allAssociationDestinations(in_model, current_activity, "Contains")
-			while (read_nr_out(inner_elements) > 0):
-				current_activity = set_pop(inner_elements)
-				// current_activity now contains the inner element to execute in alternative branches (a branch)
-				new_options = set_copy(options)
-				list_append(options, create_tuple(integer_addition(current_time, read_attribute(in_model, current_activity, "duration")), current_activity))
-				set_add(branches, create_tuple(previous_place, new_options))
-
-	dict_add(result, "Encapsulated_PetriNet", out_model)
-	return result!

+ 22 - 1
models/matches.mvc

@@ -6,6 +6,25 @@ A B {
     Composite schedule {
         {Contains} Failure failure {}
         {Contains} Success success {}
+        {Contains} Query query {
+            LHS {
+                Pre_Query/Place {
+                    label = "2"
+                }
+                Pre_ReachabilityGraph/Place {
+                    label = "3"
+                }
+
+                constraint = $
+                    Boolean function constraint(host_model : Element, mapping : Element):
+                        Boolean names_match
+                        Boolean tokens_match
+                        names_match = value_eq(read_attribute(host_model, mapping["2"], "name"), read_attribute(host_model, mapping["3"], "name"))
+                        tokens_match = value_eq(read_attribute(host_model, mapping["2"], "tokens"), read_attribute(host_model, mapping["3"], "tokens"))
+                        return bool_and(names_match, tokens_match)!
+                    $
+            }
+        }
         {Contains} ForAll match {
             LHS {
                 Pre_Query/Place {
@@ -50,7 +69,9 @@ A B {
             }
         }
     }
-    Initial (schedule, match) {}
+    Initial (schedule, query) {}
+    OnSuccess (query, match) {}
+    OnFailure (query, failure) {}
     OnSuccess (match, success) {}
     OnFailure (match, failure) {}
 }

+ 1 - 0
models/reachability.alc

@@ -137,6 +137,7 @@ Element function reachability_graph(params : Element, output_mms : Element):
 
 				keys = dict_keys(reachable_states)
 				target_id = -1
+				Float start
 				while (read_nr_out(keys) > 0):
 					other_state_id = set_pop(keys)
 

+ 2 - 1
scripts/run_local_modelverse.py

@@ -9,5 +9,6 @@ else:
     subprocess.check_call([sys.executable, "-m", "sccd.compiler.sccdc", "-p", "threads", "server.xml"], cwd="hybrid_server")
     # There's no need to specify `--kernel=baseline-jit` here, because that's the default kernel.
     # Also, specifying a kernel here breaks the performance tests.
+
     subprocess.call([sys.executable, "run_mvk_server.py"] + sys.argv[1:], cwd="hybrid_server")
-    #subprocess.call([sys.executable, "run_mvk_server.py"] + sys.argv[1:] + ["--kernel=legacy-interpreter"], cwd="hybrid_server")
+    #subprocess.call([sys.executable, "-m", "cProfile", "-s", "tottime", "run_mvk_server.py"] + sys.argv[1:], cwd="hybrid_server", stdout=open("/tmp/stdout", 'w'), stderr=open("/tmp/stderr", "w"))

+ 45 - 4
state/modelverse_state/main.py

@@ -87,10 +87,10 @@ class ModelverseState(object):
                 for line in f:
                     element_type, constructor = line.split(None, 1)
                     name, values = constructor.split("(", 1)
-                    name = name.split()[0]
                     values, _ = values.rsplit(")", 1)
 
                     if element_type == "Node":
+                        name = name.split()[0]
                         if values == "":
                             symbols[name], status = self.create_node()
                         else:
@@ -101,8 +101,16 @@ class ModelverseState(object):
                                 value = eval(value)
                             symbols[name], status = self.create_nodevalue(value)
                     elif element_type == "Edge":
+                        name = name.split()[0]
                         values = [v.split()[0] for v in values.split(",")]
                         symbols[name], status = self.create_edge(resolve(values[0]), resolve(values[1]))
+                    elif element_type == "Dict":
+                        values = [v.split()[0] for v in values.split(",")]
+                        if values[1] in complex_primitives:
+                            values[1] = string_to_instance(values[1])
+                        else:
+                            values[1] = eval(values[1])
+                        self.create_dict(resolve(values[0]), values[1], resolve(values[2]))
                     else:
                         raise Exception("Unknown element type: %s" % element_type)
 
@@ -149,8 +157,6 @@ class ModelverseState(object):
 
     def create_nodevalue(self, value):
         if not self.is_valid_datavalue(value):
-            print("Not correct: " + str(value))
-            #raise Exception()
             return (None, status.FAIL_CNV_OOB)
         self.values[self.free_id] = value
         self.nodes.add(self.free_id)
@@ -199,7 +205,7 @@ class ModelverseState(object):
             s, t = v
             return ([s, t], status.SUCCESS)
 
-    def read_dict(self, node, value):
+    def read_dict_old(self, node, value):
         e, s = self.read_dict_edge(node, value)
         if s != status.SUCCESS:
             return (None, {status.FAIL_RDICTE_UNKNOWN: status.FAIL_RDICT_UNKNOWN,
@@ -209,6 +215,41 @@ class ModelverseState(object):
                            status.FAIL_RDICTE_AMBIGUOUS: status.FAIL_RDICT_AMBIGUOUS}[s])
         return (self.edges[e][1], status.SUCCESS)
 
+    def read_dict(self, node, value):
+        try:
+            first = self.cache[node][value]
+            # Got hit, so validate
+            if (self.edges[first][0] == node) and \
+                (len(self.outgoing[first]) == 1) and \
+                (self.values[self.edges[list(self.outgoing[first])[0]][1]] == value):
+                return (self.edges[first][1], status.SUCCESS)
+            # Hit but invalid now
+            del self.cache[node][value]
+        except KeyError:
+            # Didn't exist
+            pass
+
+        if node not in self.nodes and node not in self.edges:
+            return (None, status.FAIL_RDICT_UNKNOWN)
+        if not self.is_valid_datavalue(value):
+            return (None, status.FAIL_RDICT_OOB)
+            
+        # Get all outgoing links
+        for e1 in self.outgoing.get(node, set()):
+            data_links = self.outgoing.get(e1, set())
+            # For each link, we read the links that might link to a data value
+            for e2 in data_links:
+                # Now read out the target of the link
+                target = self.edges[e2][1]
+                # And access its value
+                v = self.values.get(target, None)
+                if v == value:
+                    # Found a match
+                    # Now get the target of the original link
+                    self.cache.setdefault(node, {})[value] = e1
+                    return (self.edges[e1][1], status.SUCCESS)
+        return (None, status.FAIL_RDICT_NOT_FOUND)
+
     def read_dict_keys(self, node):
         if node not in self.nodes and node not in self.edges:
             return (None, status.FAIL_RDICTKEYS_UNKNOWN)
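
A hedged note on the cached read_dict above: the fast path reads self.cache, which is not initialized anywhere in this hunk; a minimal assumption is that the state object creates it once, e.g. in its constructor:

    # assumed initialization, not shown in this diff:
    self.cache = {}   # maps node id -> {key value -> outgoing link edge id}

On a hit, the cached edge is revalidated against the current graph (its source still matches, it still has exactly one outgoing key edge, and that key still holds the requested value) before being trusted; stale entries are evicted and the slow scan repopulates the cache.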

+ 14 - 0
sum_times.py

@@ -0,0 +1,14 @@
+import sys
+
+total = 0.0
+
+for l in open(sys.argv[1], "r"):
+    if sys.argv[2] in l:
+        _, t = l.rsplit(" ", 1)
+        try:
+            t = float(t)
+            total += t
+        except:
+            pass
+
+print("Total for " + sys.argv[2] + " = " + str(total))