@@ -9,12 +9,22 @@ include "ramify.alh"
 include "conformance_scd.alh"
 include "transform.alh"
 include "metamodels.alh"
+include "random.alh"
 
 Element core = ?
 
 String core_location = "models/CoreFormalism"
 String core_model_location = "core"
 
+Void function dict_overwrite(d : Element, key : Element, value : Element):
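+	// Remove any existing binding for this key (whether stored as a value or as a node) before adding the new value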
+	if (dict_in(d, key)):
+		dict_delete(d, key)
+	if (dict_in_node(d, key)):
+		dict_delete_node(d, key)
+	dict_add(d, key, value)
+
+	return !
+
 Void function main():
 	// Initialize the Core Formalism
 	String core_model
@@ -449,6 +459,324 @@ Boolean function check_conformance(model_id : String):
 
 	return True!
 
+Boolean function pm_finished(worklist : Element, pm : String):
+	Element finished
+	Integer cnt
+	Integer i
+
+	// Check if any of the "finish" elements are in the worklist
+	// If so, we can already finish, and therefore will stop immediately
+	finished = allInstances(pm, "Finish")
+	i = 0
+	cnt = read_nr_out(finished)
+
+	while (i < cnt):
+		// Check each finished element individually
+		if (set_in(worklist, read_edge_dst(read_out(finished, i)))):
+			return True!
+		i = i + 1
+
+	return False!
+
+Boolean function enact_action(pm : Element, element : String, prefix : String, user_id : String):
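+	// Execute a single activity of the process model: resolve the transformation it references
+	// in the core model, gather its input and output model names, and dispatch on its exact type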
+	Boolean result
+	String transformation_id
+	Element lst
+	String elem
+	Element inputs
+	Element outputs
+	String type_name
+	String exact_type
+	Element trace_links
+
+	// TODO use the prefix for data locations (in model write/read in MvC)
+
+	// Read out the referenced element from the MvC
+	transformation_id = read_attribute(pm, element, "name")
+
+	// Find all inputs and their types (i.e., key)
+	lst = allAssociationOrigins(pm, element, "consumes")
+	while (read_nr_out(lst) > 0):
+		elem = set_pop(lst)
+		// As there are no inheritance relations between full models, we can just read out the typename
+		type_name = read_attribute(core, set_pop(allAssociationDestinations(core, transformation_id, "instanceOf")), "name")
+		dict_add(inputs, type_name, read_attribute(core, elem, "name"))
+
+	// Find all outputs and their types (i.e., key)
+	lst = allAssociationDestinations(pm, element, "produces")
+	while (read_nr_out(lst) > 0):
+		elem = set_pop(lst)
+		// As there are no inheritance relations between full models, we can just read out the typename
+		type_name = read_attribute(core, set_pop(allAssociationDestinations(core, transformation_id, "instanceOf")), "name")
+		dict_add(outputs, type_name, read_attribute(core, elem, "name"))
+
+	exact_type = read_type(core, transformation_id)
+	if (exact_type == "ModelTransformation"):
+		// Model transformation is always in-place and uses only a single metamodel
+		// Therefore, we must:
+		// 1) Create an empty model, instance of merged metamodel
+		// 2) Merge the different source models and retype
+		// 3) Perform the transformation on the merged model
+		// 4) Split the resulting model based on the target formalisms
+		//
+		// There is one exception: if the target model is bound to a source model, that model is overwritten
+		// This allows for some optimizations when it is a simple in-place transformation (skip model copy, join, and split)
+		// First check for this exception, as it is much faster
+		Element input_model
+		Element schedule_model
+		String trace_link_id
+		Element merged_model
+		String merged_metamodel_id
+		String ramified_metamodel_id
+		Boolean result
+
+		schedule_model = get_full_model(transformation_id)
+
+		// 1) Create empty instance of merged metamodel
+
+		ramified_metamodel_id = set_pop(followAssociation(core, transformation_id, "instanceOf"))
+		trace_links = allOutgoingAssociationInstances(core, ramified_metamodel_id, "tracability")
+		merged_metamodel_id = ""
+		while (read_nr_out(trace_links) > 0):
+			trace_link_id = set_pop(trace_links)
+			if (value_eq(read_attribute(core, trace_link_id, "type"), "RAMified")):
+				merged_metamodel_id = readAssociationDestination(core, trace_link_id)
+		if (merged_metamodel_id != ""):
+			merged_model = instantiate_model(get_full_model(merged_metamodel_id))
+
+			// 2) Merge source models
+
+			String key
+			Element keys
+			Element input_keys
+			Element output_keys
+
+			input_keys = dict_keys(inputs)
+			while (read_nr_out(input_keys) > 0):
+				key = set_pop(input_keys)
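+				// Prefix the joined model's elements with "<formalism name>/" so they can be split off again in step 4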
+				model_join(merged_model, get_full_model(get_model_id(inputs[key])), key + "/")
+
+			// 3) Transform
+
+			log("EXECUTE TRANSFORMATION " + cast_e2s(read_attribute(core, transformation_id, "name")))
+			result = transform(merged_model, schedule_model)
+			output("Transformation executed with result: " + cast_v2s(result))
+
+			// 4) Split into different models depending on type
+
+			String desired_metamodel_id
+			Element split_off_model
+
+			output_keys = dict_keys(outputs)
+			while (read_nr_out(output_keys) > 0):
+				key = set_pop(output_keys)
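+				// 'key' is the name of the target formalism; get_model_id(key) looks up the metamodel used when splitting off that part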
+				desired_metamodel_id = get_model_id(key)
+				split_off_model = model_split(merged_model, get_full_model(desired_metamodel_id), key + "/")
+
+				// Check if the destination model already exists
+				if (get_model_id(outputs[key]) == ""):
+					// New model
+					model_create(split_off_model, outputs[key], user_id, desired_metamodel_id, "Model")
+				else:
+					// Model exists, so we overwrite
+					model_overwrite(split_off_model, get_model_id(outputs[key]))
+		else:
+			output("Could not resolve intermediate merged metamodel")
+
+	elif (exact_type == "ActionLanguage"):
+		Element dictionary
+		Element new_inputs
+		Element input_keys
+		Element output_keys
+		Element result
+		String key
+		Element func
+
+		log("Action Language execution starts!")
+		log("Getting full model for transformation")
+		log("Got model: " + cast_e2s(read_attribute(core, transformation_id, "location")))
+
+		// 1) Group source models in a dictionary
+		// --> This is just the "inputs" variable, but with all references resolved
+		log("Create inputs")
+		new_inputs = create_node()
+		input_keys = dict_keys(inputs)
+		while (read_nr_out(input_keys) > 0):
+			key = set_pop(input_keys)
+			log("Resolving " + cast_e2s(key))
+			log(" --> " + cast_e2s(inputs[key]))
+			log(" ID " + cast_e2s(get_model_id(inputs[key])))
+			log(" full m " + cast_e2s(get_full_model(get_model_id(inputs[key]))))
+			dict_add(new_inputs, key, get_full_model(get_model_id(inputs[key])))
+		inputs = new_inputs
+
+		// 2) Execute action language model
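+		// get_func_AL_model turns the Action Language model into a callable function, which is then invoked with the inputs dictionary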
+		func = get_func_AL_model(get_full_model(transformation_id))
+		log("Ready to execute: " + cast_e2s(func))
+		result = func(inputs)
+		log("Result: " + cast_e2s(result))
+
+		// 3) Split the output dictionary back into separate models
+		output_keys = dict_keys(outputs)
+		while (read_nr_out(output_keys) > 0):
+			key = set_pop(output_keys)
+			log("Splitting " + key)
+
+			// Check if the destination model already exists
+			if (get_model_id(outputs[key]) == ""):
+				// New model
+				model_create(result[key], outputs[key], user_id, get_model_id(key), "Model")
+			else:
+				// Model exists, so we overwrite
+				model_overwrite(result[key], get_model_id(outputs[key]))
+
+		log("Finished")
+
+	elif (exact_type == "ManualOperation"):
+		log("Manual operation starts!")
+		// Identical to model transformations, but gives control to the user for modification
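+		// Instead of executing a rule schedule, the merged model is handed to the user for interactive editing (the 'modify' call below)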
+		// 1) Create empty instance of merged metamodel
+		Element input_model
+		String trace_link_id
+		Element merged_model
+		String merged_metamodel_id
+
+		trace_links = allOutgoingAssociationInstances(core, transformation_id, "tracability")
+		merged_metamodel_id = ""
+		while (read_nr_out(trace_links) > 0):
+			trace_link_id = set_pop(trace_links)
+			if (value_eq(read_attribute(core, trace_link_id, "type"), "operatesOn")):
+				merged_metamodel_id = readAssociationDestination(core, trace_link_id)
+
+		if (merged_metamodel_id != ""):
+			merged_model = instantiate_model(get_full_model(merged_metamodel_id))
+
+			// 2) Merge source models
+
+			String key
+			Element keys
+			Element input_keys
+			Element output_keys
+
+			input_keys = dict_keys(inputs)
+			while (read_nr_out(input_keys) > 0):
+				key = set_pop(input_keys)
+				model_join(merged_model, get_full_model(get_model_id(inputs[key])), key + "/")
+
+			// 3) Transform
+
+			log("Start modify")
+			modify(merged_model, True)
+			log("Modify finished")
+
+			// 4) Split into different models depending on type
+
+			String desired_metamodel_id
+			Element split_off_model
+
+			output_keys = dict_keys(outputs)
+			while (read_nr_out(output_keys) > 0):
+				key = set_pop(output_keys)
+				desired_metamodel_id = get_model_id(key)
+				split_off_model = model_split(merged_model, get_full_model(desired_metamodel_id), key + "/")
+
+				// Check if the destination model already exists
+				if (get_model_id(outputs[key]) == ""):
+					// New model
+					model_create(split_off_model, outputs[key], user_id, desired_metamodel_id, "Model")
+				else:
+					// Model exists, so we overwrite
+					model_overwrite(split_off_model, get_model_id(outputs[key]))
+		else:
+			output("Could not find merged metamodel")
+	else:
+		output("Did not know how to interpret model of type " + exact_type)
+
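+	// Propagate the Boolean outcome so that a subsequent Decision node in the process model can branch on it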
+	return result!
+
+Void function enact_PM(pm : Element, prefix : String, user_id : String):
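+	// Enact (interpret) a process model: starting from its Start node, repeatedly pick an
+	// enabled node, execute it, and follow its outgoing Next/Else links until a Finish node is reached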
+	Element worklist
+	String element
+	String start
+	String type
+	Boolean result
+	Element tuple
+	Element counters
+	Element join_nodes
+
+	// Initialize Join counters
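+	// Each Join instance gets a counter tracking how many of its incoming branches have completed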
+	counters = create_node()
+	join_nodes = allInstances(pm, "Join")
+	while (read_nr_out(join_nodes) > 0):
+		dict_add(counters, set_pop(join_nodes), 0)
+
+	// Create the worklist with the Start instance as first element
+	worklist = create_node()
+	set_add(worklist, create_tuple(set_pop(allInstances(pm, "Start")), True))
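+	// Worklist entries are (node, result) tuples: 'result' is the Boolean outcome of the step that scheduled the node, later consumed by Decision nodes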
+
+	while (bool_not(pm_finished(worklist, pm))):
+		// Pop a random element from the list and execute it
+		tuple = random_choice(set_to_list(worklist))
+		element = tuple[0]
+		result = tuple[1]
+
+		// Find the type (to see what to do with it)
+		// This does not yet yield the type of transformation, if it is an Execution
+		type = read_type(pm, element)
+
+		if (type == "Start"):
+			// Initial node, just progress to the next elements
+			// Nothing to do here though, as we have just started
+			result = True
+		elif (type == "Finish"):
+			// Should be impossible, as we would have ended...
+			result = result
+		elif (type == "Fork"):
+			// Just fork, so we have multiple outgoing
+			// Not a problem, as we already can do this with the usual code
+			result = result
+		elif (type == "Join"):
+			// Only do this if all dependencies are fulfilled
+			// So add to the counter of this Join
+			dict_overwrite(counters, element, integer_addition(counters[element], 1))
+
+			// Now check whether we have enough tokens to execute the Join itself
+			Integer required
+			Integer got
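+			// A Join may only fire after a token has arrived over every incoming Next/Else association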
+			required = read_nr_out(allIncomingAssociationInstances(pm, element, "Next")) + read_nr_out(allIncomingAssociationInstances(pm, element, "Else"))
+			got = counters[element]
+			if (got == required):
+				// Reset counter to 0
+				dict_overwrite(counters, element, 0)
+
+				// And continue
+			else:
+				// We haven't received all tokens yet, so we wait (i.e., continue without adding the Next link to the worklist)
+				continue!
+
+		elif (type == "Exec"):
+			// Execute a transformation
+			// This is the difficult part!
+
+			result = enact_action(pm, element, prefix, user_id)
+
+		elif (type == "Decision"):
+			// If the previous result is True, we follow the normal (Next) branch, otherwise the Else branch
+			// In this context, that means that if it is False, we add the Else target to the worklist manually and then continue the simulation loop
+			if (bool_not(result)):
+				// Apparently it is False, so map this to the "Else" branch
+				set_add(worklist, create_tuple(set_pop(allAssociationDestinations(pm, element, "Else")), True))
+				continue!
+
+		// We have finished the execution, so add all outgoing edges to the worklist
+		Element all_next
+		all_next = allAssociationDestinations(pm, element, "Next")
+		while (read_nr_out(all_next) > 0):
+			set_add(worklist, create_tuple(set_pop(all_next), True))
+
+	// Reached a finish element, so stop
+	return !
+
 Void function user_function_skip_init(user_id : String):
 	String cmd