|
@@ -481,6 +481,98 @@ Boolean function pm_finished(worklist : Element, pm : String):
|
|
|
|
|
|
return False!
|
|
|
|
|
|
+Element function execute_operation(operation_id : String, input_models : Element, output_metamodels : Element):
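+	// operation_id identifies the operation to execute, input_models is the set of source models to merge, and output_metamodels identifies the target metamodels used to split the result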
|
|
|
+	// Operations are always in-place and use only a single metamodel
|
|
|
+ // Therefore, we must:
|
|
|
+ // 1) Find merged metamodel
|
|
|
+ // 2) Merge the different source models and retype
|
|
|
+ // 3) Perform the operation on the merged model
|
|
|
+	// 4) Split the resulting model based on the target formalisms, if the operation was successful
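+	// On failure, read_root() is returned, which callers can detect with element_eq(result, read_root())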
|
|
|
+
|
|
|
+ Element input_model
|
|
|
+ String trace_link_id
|
|
|
+ Element merged_model
|
|
|
+ String merged_metamodel_id
|
|
|
+ String ramified_metamodel_id
|
|
|
+ Boolean result
|
|
|
+ String exact_type
|
|
|
+ Element trace_links
|
|
|
+ String linktype
|
|
|
+ String model_ID
|
|
|
+ String key
|
|
|
+ Element keys
|
|
|
+ Element input_keys
|
|
|
+ Element output_keys
|
|
|
+ Element model_tuples
|
|
|
+ String metamodel_name
|
|
|
+ Element metamodel
|
|
|
+ String metamodel_ID
|
|
|
+
|
|
|
+ // 1) Find merged metamodel
|
|
|
+
|
|
|
+ exact_type = read_type(core, operation_id)
|
|
|
+ ramified_metamodel_id = set_pop(followAssociation(core, operation_id, "instanceOf"))
|
|
|
+ trace_links = allOutgoingAssociationInstances(core, ramified_metamodel_id, "tracability")
|
|
|
+
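+	// Select which type of traceability link leads to the merged metamodel: "RAMified" for model transformations, "operatesOn" for manual and action language operations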
|
|
|
+ if (exact_type == "ModelTransformation"):
|
|
|
+ linktype = "RAMified"
|
|
|
+ elif (exact_type == "ManualOperation"):
|
|
|
+ linktype = "operatesOn"
|
|
|
+ elif (exact_type == "ActionLanguage"):
|
|
|
+ linktype = "operatesOn"
|
|
|
+ else:
|
|
|
+ // Don't know how to execute this operation!
|
|
|
+ return read_root()!
|
|
|
+
|
|
|
+ merged_metamodel_id = ""
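+	// If no matching traceability link is found, merged_metamodel_id stays empty and the operation is aborted below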
|
|
|
+ while (read_nr_out(trace_links) > 0):
|
|
|
+ trace_link_id = set_pop(trace_links)
|
|
|
+ if (value_eq(read_attribute(core, trace_link_id, "type"), linktype)):
|
|
|
+ merged_metamodel_id = readAssociationDestination(core, trace_link_id)
|
|
|
+
|
|
|
+ if (merged_metamodel_id != ""):
|
|
|
+ // 2) Merge source models
|
|
|
+
|
|
|
+ model_tuples = create_node()
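+		// Build a set of (metamodel name, input model) tuples as input for model_join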
|
|
|
+ while (read_nr_out(input_models) > 0):
|
|
|
+ model_ID = set_pop(input_models)
|
|
|
+ input_model = get_full_model(get_model_id(model_ID))
|
|
|
+ metamodel_name = read_attribute(core, set_pop(allAssociationDestinations(core, model_ID, "instanceOf")), "name")
|
|
|
+ set_add(model_tuples, create_tuple(metamodel_name, input_model))
|
|
|
+
|
|
|
+ merged_model = model_join(model_tuples, get_full_model(merged_metamodel_id), read_root())
|
|
|
+
|
|
|
+ // 3) Transform
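+		// Dispatch on the operation type: run the transformation schedule, let the user edit manually, or invoke the action language model as a function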
|
|
|
+
|
|
|
+ if (exact_type == "ModelTransformation"):
|
|
|
+ result = transform(merged_model, get_full_model(operation_id))
|
|
|
+ elif (exact_type == "ManualOperation"):
|
|
|
+ modify(merged_model, True)
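+			// Manual operations are considered successful once the user finishes editing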
|
|
|
+ result = True
|
|
|
+ elif (exact_type == "ActionLanguage"):
|
|
|
+ Element func
|
|
|
+ func = get_func_AL_model(get_full_model(operation_id))
|
|
|
+ result = func(merged_model)
|
|
|
+
|
|
|
+ output("Transformation executed with result: " + cast_v2s(result))
|
|
|
+
|
|
|
+		// 4) Split into different models depending on type
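+		// Only a successful operation yields output models; otherwise the merged model is discarded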
|
|
|
+
|
|
|
+ if (result):
|
|
|
+ model_tuples = create_node()
|
|
|
+ while (read_nr_out(output_metamodels) > 0):
|
|
|
+ metamodel_ID = set_pop(output_metamodels)
|
|
|
+ metamodel_name = read_attribute(core, metamodel_ID, "name")
|
|
|
+ metamodel = get_full_model(get_model_id(metamodel_ID))
|
|
|
+ set_add(model_tuples, create_tuple(metamodel_name, metamodel))
|
|
|
+ return model_split(merged_model, model_tuples, False)!
|
|
|
+ else:
|
|
|
+ return read_root()!
|
|
|
+
|
|
|
+ else:
|
|
|
+ output("Could not resolve intermediate merged metamodel")
|
|
|
+ return read_root()!
|
|
|
+
|
|
|
Boolean function enact_action(pm : Element, element : String, prefix : String, user_id : String):
|
|
|
Boolean result
|
|
|
String transformation_id
|
|
@@ -493,213 +585,48 @@ Boolean function enact_action(pm : Element, element : String, prefix : String, u
|
|
|
Element trace_links
|
|
|
Element output_mms
|
|
|
Element consumes_link
|
|
|
- Element types
|
|
|
String name
|
|
|
String value
|
|
|
+ String elem_name
|
|
|
+ Element keys
|
|
|
+ String key
|
|
|
|
|
|
inputs = create_node()
|
|
|
outputs = create_node()
|
|
|
- output_mms = create_node()
|
|
|
- types = create_node()
|
|
|
|
|
|
// Read out the referenced element from the MvC
|
|
|
transformation_id = get_model_id(read_attribute(pm, element, "name"))
|
|
|
|
|
|
- // Find all inputs and their types (i.e., key)
|
|
|
+ // Find all input model names
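+	// Inputs are collected as a set of prefixed model names and passed on to execute_operation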
|
|
|
lst = allOutgoingAssociationInstances(pm, element, "Consumes")
|
|
|
while (read_nr_out(lst) > 0):
|
|
|
- consumes_link = set_pop(lst)
|
|
|
- // As there are no inheritance relations between full models, we can just read out the typename
|
|
|
- name = read_attribute(pm, consumes_link, "name")
|
|
|
- value = read_attribute(pm, readAssociationDestination(pm, consumes_link), "name")
|
|
|
- dict_add(inputs, name, prefix + value)
|
|
|
- dict_add(types, name, read_attribute(pm, readAssociationDestination(pm, consumes_link), "type"))
|
|
|
-
|
|
|
- // Find all outputs and their types (i.e., key)
|
|
|
+ value = read_attribute(pm, readAssociationDestination(pm, set_pop(lst)), "name")
|
|
|
+ set_add(inputs, prefix + value)
|
|
|
+
|
|
|
+	// Find all output model names, keyed by their target metamodel type
|
|
|
lst = allAssociationDestinations(pm, element, "Produces")
|
|
|
while (read_nr_out(lst) > 0):
|
|
|
elem = set_pop(lst)
|
|
|
- // As there are no inheritance relations between full models, we can just read out the typename
|
|
|
type_name = read_attribute(pm, elem, "type")
|
|
|
- dict_add(outputs, type_name, string_join(prefix, read_attribute(pm, elem, "name")))
|
|
|
- dict_add(output_mms, type_name, get_full_model(get_model_id(type_name)))
|
|
|
-
|
|
|
- exact_type = read_type(core, transformation_id)
|
|
|
- if (exact_type == "ModelTransformation"):
|
|
|
- // Model transformation is always in-place and uses only a single metamodel
|
|
|
- // Therefore, we must:
|
|
|
- // 1) Create an empty model, instance of merged metamodel
|
|
|
- // 2) Merge the different source models and retype
|
|
|
- // 3) Perform the transformation on the merged model
|
|
|
- // 4) Split the resulting model based on the target formalisms
|
|
|
- //
|
|
|
- Element input_model
|
|
|
- Element schedule_model
|
|
|
- String trace_link_id
|
|
|
- Element merged_model
|
|
|
- String merged_metamodel_id
|
|
|
- String ramified_metamodel_id
|
|
|
-
|
|
|
- schedule_model = get_full_model(transformation_id)
|
|
|
-
|
|
|
- // 1) Create empty instance of merged metamodel
|
|
|
- ramified_metamodel_id = set_pop(followAssociation(core, transformation_id, "instanceOf"))
|
|
|
- trace_links = allOutgoingAssociationInstances(core, ramified_metamodel_id, "tracability")
|
|
|
- merged_metamodel_id = ""
|
|
|
- while (read_nr_out(trace_links) > 0):
|
|
|
- trace_link_id = set_pop(trace_links)
|
|
|
- if (value_eq(read_attribute(core, trace_link_id, "type"), "RAMified")):
|
|
|
- merged_metamodel_id = readAssociationDestination(core, trace_link_id)
|
|
|
-
|
|
|
- if (merged_metamodel_id != ""):
|
|
|
- // 2) Merge source models
|
|
|
- String key
|
|
|
- Element input_keys
|
|
|
- Element output_keys
|
|
|
- Element model_tuples
|
|
|
-
|
|
|
- model_tuples = create_node()
|
|
|
- input_keys = dict_keys(inputs)
|
|
|
- while (read_nr_out(input_keys) > 0):
|
|
|
- key = set_pop(input_keys)
|
|
|
- set_add(model_tuples, create_tuple(types[key], get_full_model(get_model_id(inputs[key]))))
|
|
|
-
|
|
|
- merged_model = model_join(model_tuples, get_full_model(merged_metamodel_id), read_root())
|
|
|
-
|
|
|
- // 3) Transform
|
|
|
- result = transform(merged_model, schedule_model)
|
|
|
- log("Transformation result: " + cast_b2s(result))
|
|
|
-
|
|
|
- // 4) Split in different files depending on type
|
|
|
- model_tuples = create_node()
|
|
|
- output_keys = dict_keys(outputs)
|
|
|
-
|
|
|
- while (read_nr_out(output_keys) > 0):
|
|
|
- key = set_pop(output_keys)
|
|
|
- set_add(model_tuples, create_tuple(key, get_full_model(get_model_id(key))))
|
|
|
+ elem_name = read_attribute(pm, elem, "name")
|
|
|
+ dict_add(outputs, type_name, prefix + elem_name)
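+		// Map each target metamodel name to the (prefixed) name of the model that will hold that part of the result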
|
|
|
|
|
|
- Element splitted_models
|
|
|
- splitted_models = model_split(merged_model, model_tuples, False)
|
|
|
+ result = execute_operation(transformation_id, inputs, dict_keys(outputs))
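+	// execute_operation returns a dictionary of output models keyed by metamodel name, or read_root() on failure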
|
|
|
|
|
|
- Element keys
|
|
|
- keys = dict_keys(splitted_models)
|
|
|
- while (read_nr_out(keys) > 0):
|
|
|
- key = set_pop(keys)
|
|
|
-
|
|
|
- if (get_model_id(outputs[key]) == ""):
|
|
|
- // New model
|
|
|
- model_create(splitted_models[key], outputs[key], user_id, get_model_id(key), "Model")
|
|
|
- else:
|
|
|
- model_overwrite(splitted_models[key], get_model_id(outputs[key]))
|
|
|
-
|
|
|
- else:
|
|
|
- log("Intermediate not found")
|
|
|
-
|
|
|
- elif (exact_type == "ActionLanguage"):
|
|
|
- // TODO might be broken with switch to name-based merging of inputs with same type
|
|
|
- Element dictionary
|
|
|
- Element new_inputs
|
|
|
- Element input_keys
|
|
|
- Element output_keys
|
|
|
- Element result
|
|
|
- String key
|
|
|
- Element func
|
|
|
-
|
|
|
- // 1) Group source models in dictionary
|
|
|
- // --> This is just the "inputs" variable, but resolve all references
|
|
|
- new_inputs = create_node()
|
|
|
- input_keys = dict_keys(inputs)
|
|
|
- while (read_nr_out(input_keys) > 0):
|
|
|
- key = set_pop(input_keys)
|
|
|
- dict_add(new_inputs, key, get_full_model(get_model_id(inputs[key])))
|
|
|
- inputs = new_inputs
|
|
|
-
|
|
|
- // 2) Execute action language model
|
|
|
- func = get_func_AL_model(get_full_model(transformation_id))
|
|
|
-
|
|
|
- result = func(inputs, output_mms)
|
|
|
-
|
|
|
- // 3) Split output dictionary back to seperate models
|
|
|
- output_keys = dict_keys(outputs)
|
|
|
- while (read_nr_out(output_keys) > 0):
|
|
|
- key = set_pop(output_keys)
|
|
|
-
|
|
|
- // Check if the destination model already exists
|
|
|
+ if (element_eq(result, read_root())):
|
|
|
+ // Something went wrong!
|
|
|
+ return False!
|
|
|
+ else:
|
|
|
+ keys = dict_keys(result)
|
|
|
+ while (read_nr_out(keys) > 0):
|
|
|
+ key = set_pop(keys)
|
|
|
+
|
|
|
if (get_model_id(outputs[key]) == ""):
|
|
|
// New model
|
|
|
model_create(result[key], outputs[key], user_id, get_model_id(key), "Model")
|
|
|
else:
|
|
|
- // Model exists, so we overwrite
|
|
|
model_overwrite(result[key], get_model_id(outputs[key]))
|
|
|
|
|
|
- elif (exact_type == "ManualOperation"):
|
|
|
- // Identical to model transformations, but give control to users for modification
|
|
|
- // 1) Create empty instance of merged metamodel
|
|
|
- Element input_model
|
|
|
- String trace_link_id
|
|
|
- Element merged_model
|
|
|
- String merged_metamodel_id
|
|
|
-
|
|
|
- trace_links = allOutgoingAssociationInstances(core, transformation_id, "tracability")
|
|
|
- merged_metamodel_id = ""
|
|
|
- while (read_nr_out(trace_links) > 0):
|
|
|
- trace_link_id = set_pop(trace_links)
|
|
|
- if (value_eq(read_attribute(core, trace_link_id, "type"), "operatesOn")):
|
|
|
- merged_metamodel_id = readAssociationDestination(core, trace_link_id)
|
|
|
-
|
|
|
- if (merged_metamodel_id != ""):
|
|
|
- merged_model = instantiate_model(get_full_model(merged_metamodel_id))
|
|
|
-
|
|
|
- // 2) Merge source models
|
|
|
-
|
|
|
- String key
|
|
|
- Element keys
|
|
|
- Element input_keys
|
|
|
- Element output_keys
|
|
|
- Element model_tuples
|
|
|
-
|
|
|
- model_tuples = create_node()
|
|
|
- input_keys = dict_keys(inputs)
|
|
|
- while (read_nr_out(input_keys) > 0):
|
|
|
- key = set_pop(input_keys)
|
|
|
- set_add(model_tuples, create_tuple(types[key], get_full_model(get_model_id(inputs[key]))))
|
|
|
-
|
|
|
- merged_model = model_join(model_tuples, get_full_model(merged_metamodel_id), read_root())
|
|
|
-
|
|
|
- // 3) Transform
|
|
|
-
|
|
|
- output("Please perform manual transformation " + cast_v2s(read_attribute(pm, element, "name")))
|
|
|
- modify(merged_model, True)
|
|
|
-
|
|
|
- // 4) Split in different files depending on type
|
|
|
-
|
|
|
- model_tuples = create_node()
|
|
|
- output_keys = dict_keys(outputs)
|
|
|
-
|
|
|
- while (read_nr_out(output_keys) > 0):
|
|
|
- key = set_pop(output_keys)
|
|
|
- set_add(model_tuples, create_tuple(key, get_full_model(get_model_id(key))))
|
|
|
-
|
|
|
- Element splitted_models
|
|
|
- splitted_models = model_split(merged_model, model_tuples, False)
|
|
|
-
|
|
|
- keys = dict_keys(splitted_models)
|
|
|
- while (read_nr_out(keys) > 0):
|
|
|
- key = set_pop(keys)
|
|
|
-
|
|
|
- if (get_model_id(outputs[key]) == ""):
|
|
|
- // New model
|
|
|
- model_create(splitted_models[key], outputs[key], user_id, get_model_id(key), "Model")
|
|
|
- else:
|
|
|
- model_overwrite(splitted_models[key], get_model_id(outputs[key]))
|
|
|
- else:
|
|
|
- output("Could not find merged metamodel")
|
|
|
-
|
|
|
- // TODO find out whether it succeeded or not
|
|
|
- result = True
|
|
|
- else:
|
|
|
- output("Did not know how to interpret model of type " + exact_type)
|
|
|
-
|
|
|
return result!
|
|
|
|
|
|
Void function enact_PM(pm : Element, prefix : String, user_id : String):
|
|
@@ -908,6 +835,10 @@ Void function user_function_skip_init(user_id : String):
|
|
|
Element trace_links
|
|
|
String target_model_name
|
|
|
String source_model_name
|
|
|
+ String source_model_ID
|
|
|
+ Element result
|
|
|
+ Element keys
|
|
|
+ String key
|
|
|
|
|
|
output("Which transformation do you want to execute?")
|
|
|
transformation_id = get_model_id(input())
|
|
@@ -919,35 +850,33 @@ Void function user_function_skip_init(user_id : String):
|
|
|
inputs = create_node()
|
|
|
while (read_nr_out(sources) > 0):
|
|
|
source = set_pop(sources)
|
|
|
- output(string_join("Which model to bind for source element ", read_attribute(core, source, "name")))
|
|
|
+ output(string_join("Which model to bind for source type ", read_attribute(core, source, "name")))
|
|
|
source_model_name = input()
|
|
|
- name_id = get_model_id(source_model_name)
|
|
|
+ source_model_ID = get_model_id(source_model_name)
|
|
|
-				if (name_id != ""):
+				if (source_model_ID != ""):
|
|
|
-					if (allow_read(user_id, name_id)):
+					if (allow_read(user_id, source_model_ID)):
|
|
|
- // Check for conformance to the specified metamodel!
|
|
|
- Element specified_model
|
|
|
- // TODO Maybe find out which conformance relation to use, as there might be multiple!
|
|
|
- if (check_is_typed_by(name_id, source)):
|
|
|
- if (check_conformance(name_id)):
|
|
|
- dict_add(inputs, read_attribute(core, source, "name"), source_model_name)
|
|
|
+ // Check for conformance to the requested metamodel
|
|
|
+ if (check_is_typed_by(source_model_ID, source)):
|
|
|
+ if (check_conformance(source_model_ID)):
|
|
|
+ set_add(inputs, source_model_ID)
|
|
|
+ continue!
|
|
|
else:
|
|
|
output("Model has correct type but does not conform completely!")
|
|
|
- set_add(sources, source)
|
|
|
else:
|
|
|
output("Model has different type!")
|
|
|
- set_add(sources, source)
|
|
|
else:
|
|
|
output("Permission denied")
|
|
|
- set_add(sources, source)
|
|
|
else:
|
|
|
output("No such model")
|
|
|
- set_add(sources, source)
|
|
|
+
|
|
|
+ // Retry this model type
|
|
|
+ set_add(sources, source)
|
|
|
|
|
|
targets = allOutgoingAssociationInstances(core, transformation_id, "transformOutput")
|
|
|
outputs = create_node()
|
|
|
while (read_nr_out(targets) > 0):
|
|
|
target = set_pop(targets)
|
|
|
- output(string_join("Which model to create for target element ", read_attribute(core, target, "name")))
|
|
|
+ output(string_join("Which model to create for target type ", read_attribute(core, target, "name")))
|
|
|
target_model_name = input()
|
|
|
|
|
|
if (get_model_id(target_model_name) == ""):
|
|
@@ -959,181 +888,26 @@ Void function user_function_skip_init(user_id : String):
|
|
|
dict_add(outputs, read_attribute(core, target, "name"), target_model_name)
|
|
|
else:
|
|
|
output("Permission denied; try again")
|
|
|
+ set_add(targets, target)
|
|
|
|
|
|
- exact_type = read_type(core, transformation_id)
|
|
|
- if (exact_type == "ModelTransformation"):
|
|
|
- // Model transformation is always in-place and uses only a single metamodel
|
|
|
- // Therefore, we must:
|
|
|
- // 1) Create an empty model, instance of merged metamodel
|
|
|
- // 2) Merge the different source models and retype
|
|
|
- // 3) Perform the transformation on the merged model
|
|
|
- // 4) Split the resulting model based on the target formalisms
|
|
|
- //
|
|
|
- Element input_model
|
|
|
- Element schedule_model
|
|
|
- String trace_link_id
|
|
|
- Element merged_model
|
|
|
- String merged_metamodel_id
|
|
|
- String ramified_metamodel_id
|
|
|
- Boolean result
|
|
|
-
|
|
|
- schedule_model = get_full_model(transformation_id)
|
|
|
-
|
|
|
- // 1) Create empty instance of merged metamodel
|
|
|
-
|
|
|
- ramified_metamodel_id = set_pop(followAssociation(core, transformation_id, "instanceOf"))
|
|
|
- trace_links = allOutgoingAssociationInstances(core, ramified_metamodel_id, "tracability")
|
|
|
- merged_metamodel_id = ""
|
|
|
- while (read_nr_out(trace_links) > 0):
|
|
|
- trace_link_id = set_pop(trace_links)
|
|
|
- if (value_eq(read_attribute(core, trace_link_id, "type"), "RAMified")):
|
|
|
- merged_metamodel_id = readAssociationDestination(core, trace_link_id)
|
|
|
- if (merged_metamodel_id != ""):
|
|
|
- merged_model = instantiate_model(get_full_model(merged_metamodel_id))
|
|
|
-
|
|
|
- // 2) Merge source models
|
|
|
-
|
|
|
- String key
|
|
|
- Element keys
|
|
|
- Element input_keys
|
|
|
- Element output_keys
|
|
|
- Element model_tuples
|
|
|
-
|
|
|
- model_tuples = create_node()
|
|
|
- input_keys = dict_keys(inputs)
|
|
|
- while (read_nr_out(input_keys) > 0):
|
|
|
- key = set_pop(input_keys)
|
|
|
- set_add(model_tuples, create_tuple(key, get_full_model(get_model_id(inputs[key]))))
|
|
|
-
|
|
|
- merged_model = model_join(model_tuples, get_full_model(merged_metamodel_id), read_root())
|
|
|
-
|
|
|
- // 3) Transform
|
|
|
-
|
|
|
- result = transform(merged_model, schedule_model)
|
|
|
- output("Transformation executed with result: " + cast_v2s(result))
|
|
|
-
|
|
|
- // 4) Split in different files depending on type
|
|
|
-
|
|
|
- model_tuples = create_node()
|
|
|
- output_keys = dict_keys(outputs)
|
|
|
-
|
|
|
- while (read_nr_out(output_keys) > 0):
|
|
|
- key = set_pop(output_keys)
|
|
|
- set_add(model_tuples, create_tuple(key, get_full_model(get_model_id(key))))
|
|
|
-
|
|
|
- Element splitted_models
|
|
|
- splitted_models = model_split(merged_model, model_tuples, False)
|
|
|
-
|
|
|
- keys = dict_keys(splitted_models)
|
|
|
- while (read_nr_out(keys) > 0):
|
|
|
- key = set_pop(keys)
|
|
|
-
|
|
|
- if (get_model_id(outputs[key]) == ""):
|
|
|
- // New model
|
|
|
- model_create(splitted_models[key], outputs[key], user_id, get_model_id(key), "Model")
|
|
|
- else:
|
|
|
- model_overwrite(splitted_models[key], get_model_id(outputs[key]))
|
|
|
- else:
|
|
|
- output("Could not resolve intermediate merged metamodel")
|
|
|
-
|
|
|
- elif (exact_type == "ActionLanguage"):
|
|
|
- Element dictionary
|
|
|
- Element new_inputs
|
|
|
- Element input_keys
|
|
|
- Element output_keys
|
|
|
- Element result
|
|
|
- String key
|
|
|
- Element func
|
|
|
-
|
|
|
- // 1) Group source models in dictionary
|
|
|
- // --> This is just the "inputs" variable, but resolve all references
|
|
|
- new_inputs = create_node()
|
|
|
- input_keys = dict_keys(inputs)
|
|
|
- while (read_nr_out(input_keys) > 0):
|
|
|
- key = set_pop(input_keys)
|
|
|
- dict_add(new_inputs, key, get_full_model(get_model_id(inputs[key])))
|
|
|
- inputs = new_inputs
|
|
|
-
|
|
|
- // 2) Execute action language model
|
|
|
- func = get_func_AL_model(get_full_model(transformation_id))
|
|
|
- result = func(inputs)
|
|
|
-
|
|
|
- // 3) Split output dictionary back to seperate models
|
|
|
- output_keys = dict_keys(outputs)
|
|
|
- while (read_nr_out(output_keys) > 0):
|
|
|
- key = set_pop(output_keys)
|
|
|
-
|
|
|
- // Check if the destination model already exists
|
|
|
+ result = execute_operation(transformation_id, inputs, dict_keys(outputs))
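+			// As in enact_action, a read_root() result signals that the operation failed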
|
|
|
+
|
|
|
+			// Now write out the resulting models
|
|
|
+ if (element_eq(result, read_root())):
|
|
|
+ // Something went wrong!
|
|
|
+ log("Transformation failed; discarding changes")
|
|
|
+
|
|
|
+ else:
|
|
|
+ keys = dict_keys(result)
|
|
|
+ while (read_nr_out(keys) > 0):
|
|
|
+ key = set_pop(keys)
|
|
|
+
|
|
|
if (get_model_id(outputs[key]) == ""):
|
|
|
// New model
|
|
|
model_create(result[key], outputs[key], user_id, get_model_id(key), "Model")
|
|
|
else:
|
|
|
- // Model exists, so we overwrite
|
|
|
model_overwrite(result[key], get_model_id(outputs[key]))
|
|
|
|
|
|
- elif (exact_type == "ManualOperation"):
|
|
|
- // Identical to model transformations, but give control to users for modification
|
|
|
- // 1) Create empty instance of merged metamodel
|
|
|
- Element input_model
|
|
|
- String trace_link_id
|
|
|
- Element merged_model
|
|
|
- String merged_metamodel_id
|
|
|
-
|
|
|
- trace_links = allOutgoingAssociationInstances(core, transformation_id, "tracability")
|
|
|
- merged_metamodel_id = ""
|
|
|
- while (read_nr_out(trace_links) > 0):
|
|
|
- trace_link_id = set_pop(trace_links)
|
|
|
- if (value_eq(read_attribute(core, trace_link_id, "type"), "operatesOn")):
|
|
|
- merged_metamodel_id = readAssociationDestination(core, trace_link_id)
|
|
|
-
|
|
|
- if (merged_metamodel_id != ""):
|
|
|
- merged_model = instantiate_model(get_full_model(merged_metamodel_id))
|
|
|
-
|
|
|
- // 2) Merge source models
|
|
|
-
|
|
|
- String key
|
|
|
- Element keys
|
|
|
- Element input_keys
|
|
|
- Element output_keys
|
|
|
- Element model_tuples
|
|
|
-
|
|
|
- model_tuples = create_node()
|
|
|
- input_keys = dict_keys(inputs)
|
|
|
- while (read_nr_out(input_keys) > 0):
|
|
|
- key = set_pop(input_keys)
|
|
|
- set_add(model_tuples, create_tuple(key, get_full_model(get_model_id(inputs[key]))))
|
|
|
-
|
|
|
- merged_model = model_join(model_tuples, get_full_model(merged_metamodel_id), read_root())
|
|
|
-
|
|
|
- // 3) Transform
|
|
|
-
|
|
|
- modify(merged_model, True)
|
|
|
-
|
|
|
- // 4) Split in different files depending on type
|
|
|
-
|
|
|
- model_tuples = create_node()
|
|
|
- output_keys = dict_keys(outputs)
|
|
|
-
|
|
|
- while (read_nr_out(output_keys) > 0):
|
|
|
- key = set_pop(output_keys)
|
|
|
- set_add(model_tuples, create_tuple(key, get_full_model(get_model_id(key))))
|
|
|
-
|
|
|
- Element splitted_models
|
|
|
- splitted_models = model_split(merged_model, model_tuples, False)
|
|
|
-
|
|
|
- keys = dict_keys(splitted_models)
|
|
|
- while (read_nr_out(keys) > 0):
|
|
|
- key = set_pop(keys)
|
|
|
-
|
|
|
- if (get_model_id(outputs[key]) == ""):
|
|
|
- // New model
|
|
|
- model_create(splitted_models[key], outputs[key], user_id, get_model_id(key), "Model")
|
|
|
- else:
|
|
|
- model_overwrite(splitted_models[key], get_model_id(outputs[key]))
|
|
|
- else:
|
|
|
- output("Could not find merged metamodel")
|
|
|
- else:
|
|
|
- output("Did not know how to interpret model of type " + exact_type)
|
|
|
else:
|
|
|
output("Model is not an executable transformation")
|
|
|
else:
|