소스 검색

Fixes to transformation algorithm

Yentl Van Tendeloo 8 년 전
부모
커밋
a7dee2a4ce
5개의 변경된 파일, 98개의 추가 그리고 88개의 삭제
  1. BIN
      bootstrap/bootstrap.m.gz
  2. 21 20
      bootstrap/model_management.alc
  3. 4 0
      bootstrap/transform.alc
  4. 73 63
      core/core_algorithm.alc
  5. 0 5
      state/modelverse_state/main.py

BIN
bootstrap/bootstrap.m.gz


+ 21 - 20
bootstrap/model_management.alc

@@ -176,6 +176,7 @@ Element function model_split(src_model : Element, target_metamodel : Element, re
 	String src
 	String dst
 	Integer length
+	String new_type
 
 	queue = set_to_list(dict_keys(src_model["model"]))
 	mapping = create_node()
@@ -184,29 +185,29 @@ Element function model_split(src_model : Element, target_metamodel : Element, re
 	while (read_nr_out(queue) > 0):
 		name = list_pop(queue, 0)
 
-		if (is_edge(src_model["model"][name])):
-			// Is an edge, so potentially queue it
-			String src
-			String dst
+		type = read_type(src_model, name)
+		if (string_startswith(type, retyping_key)):
+			new_type = string_substr(type, length, string_len(type))
+			if (is_edge(src_model["model"][name])):
+				// Is an edge, so potentially queue it
+				String src
+				String dst
 
-			src = reverseKeyLookup(src_model["model"], read_edge_src(src_model["model"][name]))
-			dst = reverseKeyLookup(src_model["model"], read_edge_dst(src_model["model"][name]))
-			type = reverseKeyLookup(src_model["metamodel"]["model"], dict_read_node(src_model["type_mapping"], src_model["model"][name]))
+				src = reverseKeyLookup(src_model["model"], read_edge_src(src_model["model"][name]))
+				dst = reverseKeyLookup(src_model["model"], read_edge_dst(src_model["model"][name]))
 
-			if (bool_and(dict_in(mapping, src), dict_in(mapping, dst))):
-				// All present, so create the link between them
-				dict_add(mapping, name, instantiate_link(dst_model, string_substr(type, length, string_len(type)), "", mapping[src], mapping[dst]))
-			else:
-				list_append(queue, name)
+				if (bool_and(dict_in(mapping, src), dict_in(mapping, dst))):
+					// All present, so create the link between them
+					dict_add(mapping, name, instantiate_link(dst_model, new_type, "", mapping[src], mapping[dst]))
+				else:
+					list_append(queue, name)
 
-		elif (has_value(src_model["model"][name])):
-			// Has a value, so copy that as well
-			type = reverseKeyLookup(src_model["metamodel"]["model"], dict_read_node(src_model["type_mapping"], src_model["model"][name]))
-			dict_add(mapping, name, instantiate_value(dst_model, string_substr(type, length, string_len(type)), "", src_model["model"][name]))
+			elif (has_value(src_model["model"][name])):
+				// Has a value, so copy that as well
+				dict_add(mapping, name, instantiate_value(dst_model, new_type, "", src_model["model"][name]))
 
-		else:
-			// Is a node
-			type = reverseKeyLookup(src_model["metamodel"]["model"], dict_read_node(src_model["type_mapping"], src_model["model"][name]))
-			dict_add(mapping, name, instantiate_node(dst_model, string_substr(type, length, string_len(type)), ""))
+			else:
+				// Is a node
+				dict_add(mapping, name, instantiate_node(dst_model, new_type, ""))
 
 	return dst_model!

+ 4 - 0
bootstrap/transform.alc

@@ -398,6 +398,8 @@ Boolean function transform_atomic(host_model : Element, schedule_model : Element
 	Element mappings
 	Element mapping
 	mappings = full_match(host_model, schedule_model, current)
+	log("Execute atomic " + current)
+	log("Mappings: " + cast_v2s(read_nr_out(mappings)))
 
 	if (read_nr_out(mappings) > 0):
 		// Pick one!
@@ -417,6 +419,8 @@ Boolean function transform_forall(host_model : Element, schedule_model : Element
 	Boolean result
 
 	mappings = full_match(host_model, schedule_model, current)
+	log("Execute forall " + current)
+	log("Mappings: " + cast_v2s(read_nr_out(mappings)))
 
 	if (read_nr_out(mappings) > 0):
 		result = True

+ 73 - 63
core/core_algorithm.alc

@@ -380,6 +380,8 @@ Void function user_function_skip_init(user_id : String):
 			Element inputs
 			Element outputs
 			Element trace_links
+			String target_model_name
+			String source_model_name
 
 			output("Which transformation do you want to execute?")
 			transformation_id = get_model_id(input())
@@ -393,10 +395,11 @@ Void function user_function_skip_init(user_id : String):
 							source = set_pop(sources)
 							log("Got source link: " + cast_e2s(source))
 							output(string_join("Which model to bind for source element ", read_attribute(core, source, "name")))
-							name_id = get_model_id(input())
+							source_model_name = input()
+							name_id = get_model_id(source_model_name)
 							if (name_id != ""):
 								if (allow_read(user_id, name_id)):
-									dict_add(inputs, read_attribute(core, source, "name"), name_id)
+									dict_add(inputs, read_attribute(core, source, "name"), source_model_name)
 								else:
 									output("Permission denied")
 									set_add(sources, source)
@@ -409,8 +412,17 @@ Void function user_function_skip_init(user_id : String):
 						while (read_nr_out(targets) > 0):
 							target = set_pop(targets)
 							output(string_join("Which model to create for target element ", read_attribute(core, target, "name")))
-							name_id = get_model_id(input())
-							dict_add(outputs, read_attribute(core, target, "name"), name_id)
+							target_model_name = input()
+
+							if (get_model_id(target_model_name) == ""):
+								// Doesn't exist yet, so we can easily create
+								dict_add(outputs, read_attribute(core, target, "name"), target_model_name)
+							else:
+								// Already exists, so we need to check for write access
+								if (allow_write(user_id, get_model_id(target_model_name))):
+									dict_add(outputs, read_attribute(core, target, "name"), target_model_name)
+								else:
+									output("Permission denied; try again")
 
 						exact_type = read_type(core, transformation_id)
 						if (exact_type == "ModelTransformation"):
@@ -433,67 +445,65 @@ Void function user_function_skip_init(user_id : String):
 							Boolean result
 
 							schedule_model = import_node(read_attribute(core, transformation_id, "location"))
-							if (bool_and(bool_and(read_nr_out(inputs) == 1, read_nr_out(outputs) == 1), set_equality(inputs, outputs))):
-								// inputs and outputs have the same values and there is only one: keep in-place without additional bookkeeping
-								input_model = import_node(read_attribute(core, set_pop(inputs), "location"))
-								result = transform(input_model, schedule_model)
+							// Need to fall back to the default approach, which is way slower
+							// 1) Create empty instance of merged metamodel
+
+							ramified_metamodel_id = set_pop(followAssociation(core, transformation_id, "instanceOf"))
+							log("Got ramified MM: " + ramified_metamodel_id)
+							trace_links = allOutgoingAssociationInstances(core, ramified_metamodel_id, "tracability")
+							log("With tracability links: " + set_to_string(trace_links))
+							merged_metamodel_id = ""
+							while (read_nr_out(trace_links) > 0):
+								trace_link_id = set_pop(trace_links)
+								if (value_eq(read_attribute(core, trace_link_id, "type"), "RAMified")):
+									merged_metamodel_id = readAssociationDestination(core, trace_link_id)
+							if (merged_metamodel_id != ""):
+								merged_model = instantiate_model(import_node(read_attribute(core, merged_metamodel_id, "location")))
+
+								// 2) Merge source models
+
+								String key
+								Element keys
+								Element input_keys
+								Element output_keys
+
+								input_keys = dict_keys(inputs)
+								while (read_nr_out(input_keys) > 0):
+									key = set_pop(input_keys)
+									model_join(merged_model, import_node(read_attribute(core, get_model_id(inputs[key]), "location")), key + "_")
+
+								// 3) Transform
+
+								result = transform(merged_model, schedule_model)
 								output("Transformation executed with result: " + cast_v2s(result))
+
+								// 4) Split in different files depending on type
+
+								String desired_metamodel_id
+								Element split_off_model
+
+								output_keys = dict_keys(outputs)
+								while (read_nr_out(output_keys) > 0):
+									key = set_pop(output_keys)
+									log("Key: " + key)
+									log("Model id: " + get_model_id(key))
+									desired_metamodel_id = get_model_id(key)
+									log("Instance of: " + desired_metamodel_id)
+									split_off_model = model_split(merged_model, import_node(read_attribute(core, desired_metamodel_id, "location")), key + "_")
+
+									// Check if the destination model already exists
+									if (get_model_id(outputs[key]) == ""):
+										// New model
+										model_create(split_off_model, outputs[key], user_id, desired_metamodel_id, "Model")
+										log("Created new model with name: " + cast_v2s(outputs[key]))
+										log("Split off model has size: " + cast_v2s(read_nr_out(split_off_model["model"])))
+									else:
+										// Model exists, so we overwrite
+										model_overwrite(split_off_model, get_model_id(outputs[key]))
+										log("Overwrite existing model with name: " + cast_v2s(outputs[key]))
+										log("Split off model has size: " + cast_v2s(read_nr_out(split_off_model["model"])))
 							else:
-								// Need to fall back to the default approach, which is way slower
-								// 1) Create empty instance of merged metamodel
-
-								ramified_metamodel_id = set_pop(followAssociation(core, transformation_id, "instanceOf"))
-								log("Got ramified MM: " + ramified_metamodel_id)
-								trace_links = allOutgoingAssociationInstances(core, ramified_metamodel_id, "tracability")
-								log("With tracability links: " + set_to_string(trace_links))
-								merged_metamodel_id = ""
-								while (read_nr_out(trace_links) > 0):
-									trace_link_id = set_pop(trace_links)
-									if (value_eq(read_attribute(core, trace_link_id, "type"), "RAMified")):
-										merged_metamodel_id = readAssociationDestination(core, trace_link_id)
-								if (merged_metamodel_id != ""):
-									merged_model = instantiate_model(import_node(read_attribute(core, merged_metamodel_id, "location")))
-
-									// 2) Merge source models
-
-									String key
-									Element keys
-									Element input_keys
-									Element output_keys
-
-									input_keys = dict_keys(inputs)
-									while (read_nr_out(input_keys) > 0):
-										key = set_pop(input_keys)
-										model_join(merged_model, import_node(read_attribute(core, inputs[key], "location")), key + "_")
-
-									// 3) Transform
-
-									result = transform(merged_model, schedule_model)
-
-									// 4) Split in different files depending on type
-
-									String desired_metamodel_id
-									Element split_off_model
-
-									output_keys = dict_keys(outputs)
-									while (read_nr_out(output_keys) > 0):
-										key = set_pop(output_keys)
-										log("Key: " + key)
-										log("Model id: " + get_model_id(key))
-										desired_metamodel_id = set_pop(followAssociation(core, get_model_id(key), "instanceOf"))
-										log("Instance of: " + desired_metamodel_id)
-										split_off_model = model_split(merged_model, import_node(read_attribute(core, desired_metamodel_id, "location")), key + "+")
-
-										// Check if the destination model already exists
-										if (get_model_id(outputs[key]) == ""):
-											// New model
-											model_create(split_off_model, outputs[key], user_id, desired_metamodel_id, "Model")
-										else:
-											// Model exists, so we overwrite
-											model_overwrite(split_off_model, outputs[key])
-									output("Transformation executed with result: " + cast_v2s(result))
-								else:
-									output("Could not resolve intermediate merged metamodel")
+								output("Could not resolve intermediate merged metamodel")
 						elif (exact_type == "ActionLanguage"):
 							output("Not Implemented yet!")
 						else:

+ 0 - 5
state/modelverse_state/main.py

@@ -162,11 +162,6 @@ class ModelverseState(object):
             return (None, status.FAIL_CDICT_TARGET)
         if not self.is_valid_datavalue(data):
             return (None, status.FAIL_CDICT_OOB)
-        if (type(data) == float and data == 0.0):
-            print("Got dictionary with value 0.0")
-            print(locals())
-            print(self.values.get(destination, destination))
-            raise Exception()
         n = self.create_nodevalue(data)[0]
         e = self.create_edge(source, destination)[0]
         self.create_edge(e, n)