25 Commits 07edcc0a8e ... 457aac48b3

Author SHA1 Message Date
  Joeri Exelmans 457aac48b3 fix 3 months ago
  Joeri Exelmans 6b5931343d Merge remote-tracking branch 'github/master' into development 3 months ago
  Joeri Exelmans 558772fbe4 commit some long outstanding changes 3 months ago
  Joeri Exelmans e046f2f972 nice visualization 3 months ago
  Joeri Exelmans fecce51828 add tutorial on model transformation with pivots 3 months ago
  Joeri Exelmans 790ba031cf add conformance check to MT tutorial 3 months ago
  Joeri Exelmans 33a70c9a88 add model transformation tutorial 3 months ago
  Joeri Exelmans 66b9a2dc33 add tutorial 3 months ago
  Joeri Exelmans 35f74aed84 get rid of unnecessary link 3 months ago
  Joeri Exelmans 069cb439cb add two more tutorials 3 months ago
  Joeri Exelmans 1dfeef767e start writing tutorials 3 months ago
  joeriexelmans 35f74ab79d Merge pull request #2 from joeriexelmans/development 4 months ago
  Inte Vleminckx 9475b1fdc5 Merge remote-tracking branch 'origin/master' into development 5 months ago
  Inte Vleminckx d00b9c25db Add 'simplified' version of the FTG+PM++ formalism with operational semantics 5 months ago
  robbe 04a17f6ac8 has_slot(obj) now works on instance instead of class. Useful for optional field 6 months ago
  robbe 8ee9fba4ea petrinet example fixed 6 months ago
  robbe 756b3f30da get_slots and is_instance added to readonly api + is_instance implementation 6 months ago
  robbe 5e5865d0d5 base_case of len == 0 added (same as Interactive decisionMaker) 6 months ago
  robbe bad9e8e32a removed unused variable 6 months ago
  robbe 87fc7362db Scheduler petrinet example 6 months ago
  robbe 2c64ebda67 Scheduler first commit 6 months ago
  Inte Vleminckx ced3edbd08 fix bytes extraction + give created objects also a name when None is provided (same as links) 8 months ago
  Joeri Exelmans 70c53a9aef rename branch 9 months ago
  Joeri Exelmans 7f5c2f39fc update readme 9 months ago
  Joeri Exelmans 1d03337a61 update readme 9 months ago
66 changed files with 2950 additions and 28 deletions
  1. README.md  (+4 -0)
  2. api/cd.py  (+4 -1)
  3. api/od.py  (+15 -8)
  4. bootstrap/scd.py  (+1 -2)
  5. concrete_syntax/textual_od/parser.py  (+2 -2)
  6. examples/conformance/woods.py  (+0 -4)
  7. examples/ftg_pm_pt/ftg_pm_pt.py  (+47 -0)
  8. examples/ftg_pm_pt/help_functions.py  (+68 -0)
  9. examples/ftg_pm_pt/helpers/composite_activity.py  (+272 -0)
  10. examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_lhs.od  (+2 -0)
  11. examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_nac.od  (+7 -0)
  12. examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_rhs.od  (+12 -0)
  13. examples/ftg_pm_pt/operational_semantics/r_exec_activity_lhs.od  (+49 -0)
  14. examples/ftg_pm_pt/operational_semantics/r_exec_activity_rhs.od  (+42 -0)
  15. examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_lhs.od  (+36 -0)
  16. examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_rhs.od  (+29 -0)
  17. examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_lhs.od  (+20 -0)
  18. examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_rhs.od  (+42 -0)
  19. examples/ftg_pm_pt/pm/metamodels/mm_design.od  (+200 -0)
  20. examples/ftg_pm_pt/pm/metamodels/mm_runtime.od  (+38 -0)
  21. examples/ftg_pm_pt/pt/metamodels/mm_design.od  (+109 -0)
  22. examples/ftg_pm_pt/runner.py  (+162 -0)
  23. examples/petrinet/models/schedule.od  (+66 -0)
  24. examples/petrinet/operational_semantics/all_input_have_token.od  (+13 -0)
  25. examples/petrinet/operational_semantics/all_inputs.od  (+13 -0)
  26. examples/petrinet/operational_semantics/all_output_places.od  (+13 -0)
  27. examples/petrinet/operational_semantics/all_output_places_update.od  (+13 -0)
  28. examples/petrinet/operational_semantics/delete_all.od  (+0 -0)
  29. examples/petrinet/operational_semantics/r_fire_transition_lhs.od  (+1 -1)
  30. examples/petrinet/operational_semantics/transition.od  (+1 -0)
  31. examples/petrinet/runner.py  (+15 -5)
  32. examples/schedule/RuleExecuter.py  (+49 -0)
  33. examples/schedule/ScheduledActionGenerator.py  (+104 -0)
  34. examples/schedule/__init__.py  (+0 -0)
  35. examples/schedule/generator.py  (+129 -0)
  36. examples/schedule/models/README.md  (+26 -0)
  37. examples/schedule/models/scheduling_MM.od  (+46 -0)
  38. examples/schedule/schedule_lib/__init__.py  (+12 -0)
  39. examples/schedule/schedule_lib/data.py  (+63 -0)
  40. examples/schedule/schedule_lib/data_modify.py  (+26 -0)
  41. examples/schedule/schedule_lib/data_node.py  (+47 -0)
  42. examples/schedule/schedule_lib/end.py  (+21 -0)
  43. examples/schedule/schedule_lib/exec_node.py  (+34 -0)
  44. examples/schedule/schedule_lib/funcs.py  (+10 -0)
  45. examples/schedule/schedule_lib/id_generator.py  (+8 -0)
  46. examples/schedule/schedule_lib/loop.py  (+57 -0)
  47. examples/schedule/schedule_lib/match.py  (+42 -0)
  48. examples/schedule/schedule_lib/null_node.py  (+25 -0)
  49. examples/schedule/schedule_lib/print.py  (+28 -0)
  50. examples/schedule/schedule_lib/rewrite.py  (+38 -0)
  51. examples/schedule/schedule_lib/singleton.py  (+8 -0)
  52. examples/schedule/schedule_lib/start.py  (+16 -0)
  53. examples/schedule/templates/schedule_dot.j2  (+9 -0)
  54. examples/schedule/templates/schedule_template.j2  (+35 -0)
  55. examples/schedule/templates/schedule_template_wrap.j2  (+47 -0)
  56. services/od.py  (+1 -1)
  57. transformation/rewriter.py  (+6 -2)
  58. transformation/rule.py  (+0 -1)
  59. tutorial/00_metamodeling.py  (+157 -0)
  60. tutorial/01_constraints.py  (+92 -0)
  61. tutorial/02_inheritance.py  (+61 -0)
  62. tutorial/03_api.py  (+71 -0)
  63. tutorial/04_transformation.py  (+167 -0)
  64. tutorial/05_advanced_transformation.py  (+213 -0)
  65. util/loader.py  (+3 -0)
  66. util/simulator.py  (+3 -1)

+ 4 - 0
README.md

@@ -30,3 +30,7 @@ The following branches exist:
  * `master` - currently equivalent to `mde2425` (this is the branch that was cloned by the students). This branch will be deleted after Sep 2025, because the name is too vague.
  * `development` - in this branch, new development will occur, primarily cleaning up the code to prepare for next year's MDE classes.
 
+
+## Tutorial
+
+A good place to learn how to use muMLE is the `tutorial` directory. Each file is an executable Python script that explains muMLE step-by-step (read the comments).

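Since each tutorial file is a plain Python script, one way to work through them (assuming you run from the repository root, where the new `tutorial/` directory lives) is simply:

    # Hypothetical invocation; any of the numbered tutorial files works the same way.
    import runpy
    runpy.run_path("tutorial/00_metamodeling.py", run_name="__main__")
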
+ 4 - 1
api/cd.py

@@ -53,7 +53,7 @@ class CDAPI:
         return self.bottom.read_outgoing_elements(self.m, type_name)[0]
 
     def is_direct_subtype(self, super_type_name: str, sub_type_name: str):
-        return sub_type_name in self.direct_sub_types[super_type]
+        return sub_type_name in self.direct_sub_types[super_type_name]
 
     def is_direct_supertype(self, sub_type_name: str, super_type_name: str):
         return super_type_name in self.direct_super_types[sub_type_name]
@@ -83,3 +83,6 @@ class CDAPI:
                 result = self.find_attribute_type(supertype, attr_name)
                 if result != None:
                     return result
+
+    def get_type(self, type_name: str):
+        return next(k for k, v in self.type_model_names.items() if v == type_name)

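The new `CDAPI.get_type` is a linear reverse lookup over `type_model_names`: it returns the first key whose value equals the requested type name. A minimal self-contained sketch of the same pattern, using a hypothetical mapping (the real key/value types live inside CDAPI and are not shown here):

    import uuid

    # Hypothetical element -> type-name mapping standing in for self.type_model_names.
    type_model_names = {
        uuid.uuid4(): "Place",
        uuid.uuid4(): "Transition",
    }

    def get_type(type_name: str):
        # First key whose value matches; note that next() without a default
        # raises StopIteration when the name is unknown.
        return next(k for k, v in type_model_names.items() if v == type_name)

    print(get_type("Place"))  # -> the key mapped to "Place"

If this lookup turns out to be hot, inverting the dictionary once ({v: k for k, v in ...}) avoids the repeated linear scan.
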
+ 15 - 8
api/od.py

@@ -10,7 +10,8 @@ from uuid import UUID
 from typing import Optional
 from util.timer import Timer
 
-NEXT_ID = 0
+NEXT_LINK_ID = 0
+NEXT_OBJ_ID = 0
 
 # Models map names to elements
 # This builds the inverse mapping, so we can quickly lookup the name of an element
@@ -145,7 +146,7 @@ class ODAPI:
         typ = self.cdapi.get_type(type_name)
         types = set(typ) if not include_subtypes else self.cdapi.transitive_sub_types[type_name]
         for type_of_obj in self.bottom.read_outgoing_elements(obj, "Morphism"):
-            if type_of_obj in types:
+            if self.get_name(type_of_obj) in types:
                 return True
         return False
 
@@ -153,10 +154,9 @@ class ODAPI:
         self.bottom.delete_element(obj)
         self.__recompute_mappings()
 
-    # Does the class of the object have the given attribute?
+    # Does the object have the given attribute?
     def has_slot(self, obj: UUID, attr_name: str):
-        class_name = self.get_name(self.get_type(obj))
-        return self.od.get_attr_link_name(class_name, attr_name) != None
+        return self.od.get_slot_link(obj, attr_name) != None
 
     def get_slots(self, obj: UUID) -> list[str]:
         return [attr_name for attr_name, _ in self.od.get_slots(obj)]
@@ -247,7 +247,7 @@ class ODAPI:
             raise Exception("Unimplemented type "+value)
 
     def create_link(self, link_name: Optional[str], assoc_name: str, src: UUID, tgt: UUID):
-        global NEXT_ID
+        global NEXT_LINK_ID
         types = self.bottom.read_outgoing_elements(self.mm, assoc_name) 
         if len(types) == 0:
             raise Exception(f"No such association: '{assoc_name}'")
@@ -255,13 +255,18 @@ class ODAPI:
             raise Exception(f"More than one association exists with name '{assoc_name}' - this means the MM is invalid.")
         typ = types[0]
         if link_name == None:
-            link_name = f"__{assoc_name}{NEXT_ID}"
-            NEXT_ID += 1
+            link_name = f"__{assoc_name}{NEXT_LINK_ID}"
+            NEXT_LINK_ID += 1
         link_id = self.od._create_link(link_name, typ, src, tgt)
         self.__recompute_mappings()
+
         return link_id
 
     def create_object(self, object_name: Optional[str], class_name: str):
+        global NEXT_OBJ_ID
+        if object_name == None:
+            object_name = f"__{class_name}{NEXT_OBJ_ID}"
+            NEXT_OBJ_ID += 1
         obj = self.od.create_object(object_name, class_name)
         self.__recompute_mappings()
         return obj
@@ -279,6 +284,7 @@ def bind_api_readonly(odapi):
         'get_target': odapi.get_target,
         'get_source': odapi.get_source,
         'get_slot': odapi.get_slot,
+        'get_slots': odapi.get_slots,
         'get_slot_value': odapi.get_slot_value,
         'get_slot_value_default': odapi.get_slot_value_default,
         'get_all_instances': odapi.get_all_instances,
@@ -287,6 +293,7 @@ def bind_api_readonly(odapi):
         'get_outgoing': odapi.get_outgoing,
         'get_incoming': odapi.get_incoming,
         'has_slot': odapi.has_slot,
+        'is_instance': odapi.is_instance,
     }
     return funcs
 
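Together, the changes above make `has_slot` answer for the instance rather than its class (useful for optional attributes, as the commit message notes), auto-name objects created with `None`, and expose `get_slots` and `is_instance` through the read-only API used by constraint and condition code. A standalone sketch of that kind of usage, with a hypothetical in-memory object store standing in for ODAPI (the names `pm_Activity`, `composite` and `subworkflow_path` are taken from the metamodel added further below):

    # Hypothetical stand-in for ODAPI: objects as dicts of slots, types as plain strings.
    objects = {
        "act_build": {"type": "pm_Activity",
                      "slots": {"name": "build", "composite": True, "subworkflow_path": "inner.od"}},
        "act_test":  {"type": "pm_Activity",
                      "slots": {"name": "test", "composite": False}},
    }

    def has_slot(obj, attr_name):       # per-instance check, like the updated ODAPI.has_slot
        return attr_name in objects[obj]["slots"]

    def get_slot_value(obj, attr_name):
        return objects[obj]["slots"][attr_name]

    def get_slots(obj):                 # names of the slots actually present on the instance
        return list(objects[obj]["slots"])

    def is_instance(obj, type_name):    # simplified: exact type match, no subtypes
        return objects[obj]["type"] == type_name

    # Constraint-style check: a composite activity must carry the optional subworkflow_path slot.
    for obj in objects:
        ok = (not is_instance(obj, "pm_Activity")
              or not get_slot_value(obj, "composite")
              or has_slot(obj, "subworkflow_path"))
        print(obj, get_slots(obj), ok)
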

+ 1 - 2
bootstrap/scd.py

@@ -78,8 +78,7 @@ def bootstrap_scd(state: State) -> UUID:
     add_edge_element("gc_inh_element", glob_constr_node, element_node)
     # # Attribute inherits from Element
     add_edge_element("attr_inh_element", attr_node, element_node)
-    # # Association inherits from Element
-    # add_edge_element("assoc_inh_element", assoc_edge, element_node)
+    # # Association inherits from Class
     add_edge_element("assoc_inh_element", assoc_edge, class_node)
     # # AttributeLink inherits from Element
     add_edge_element("attr_link_inh_element", attr_link_edge, element_node)

+ 2 - 2
concrete_syntax/textual_od/parser.py

@@ -93,8 +93,8 @@ def parse_od(state,
             return (_Code(str(token[1:-1])), token.line) # strip the ``
 
         def BYTES(self, token):
-            # return (bytes(token[2:-1], "utf-8"), token.line)  # Strip b"" or b''
-            return (bytes(token[2:-1], "utf-8"), token.line)  # Strip b"" or b''
+            # Strip b"" or b'', and make \\ back to \ (happens when reading the file as a string)
+            return (token[2:-1].encode().decode('unicode_escape').encode('raw_unicode_escape'), token.line)  # Strip b"" or b''
 
         def INDENTED_CODE(self, token):
             skip = 4 # strip the ``` and the following newline character

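The 'fix bytes extraction' change above turns escape sequences that were read from the `.od` file as literal backslash characters back into raw bytes. A standalone illustration of that round-trip (assuming the escapes are byte-style \xNN sequences):

    # As read from a .od file, b"\x00\xffhi" arrives as a str containing literal backslashes.
    token_body = r"\x00\xffhi"

    raw = token_body.encode().decode('unicode_escape').encode('raw_unicode_escape')
    print(raw)                  # b'\x00\xffhi'
    assert raw == b"\x00\xffhi"
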
+ 0 - 4
examples/conformance/woods.py

@@ -198,7 +198,3 @@ if yes_no("Print PlantUML?"):
     print("==================================")
     print(make_url(uml))
     print("==================================")
-    print("Go to either:")
-    print("  ▸ https://www.plantuml.com/plantuml/uml")
-    print("  ▸ https://mstro.duckdns.org/plantuml/uml")
-    print("and paste the above string.")

+ 47 - 0
examples/ftg_pm_pt/ftg_pm_pt.py

@@ -0,0 +1,47 @@
+import os
+
+# Todo: remove src.backend.muMLE from the imports
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from concrete_syntax.textual_od.parser import parse_od
+from api.od import ODAPI
+from concrete_syntax.textual_od.renderer import render_od as od_renderer
+from concrete_syntax.plantuml import make_url as plant_url, renderer as plant_renderer
+from concrete_syntax.graphviz import make_url as graphviz_url, renderer as graphviz_renderer
+
+class FtgPmPt:
+
+    def __init__(self, name: str):
+        self.state = DevState()
+        self.scd_mmm = bootstrap_scd(self.state)
+        self.meta_model = self.load_metamodel()
+        self.model = None
+        self.odapi = None
+        self.name = name
+
+    @staticmethod
+    def read_file(file_name):
+        with open(os.path.join(os.path.dirname(__file__), file_name)) as file:
+            return file.read()
+
+    def load_metamodel(self):
+        mm_cs = self.read_file("pm/metamodels/mm_design.od")
+        mm_rt_cs = mm_cs + self.read_file("pm/metamodels/mm_runtime.od")
+        mm_total = mm_rt_cs + self.read_file("pt/metamodels/mm_design.od")
+        return parse_od(self.state, m_text=mm_total, mm=self.scd_mmm)
+
+    def load_model(self, m_text: str | None = None):
+        m_text = "" if not m_text else m_text
+        self.model = parse_od(self.state, m_text=m_text, mm=self.meta_model)
+        self.odapi = ODAPI(self.state, self.model, self.meta_model)
+
+    def render_od(self):
+        return od_renderer(self.state, self.model, self.meta_model, hide_names=False)
+
+    def render_plantuml_object_diagram(self):
+        print(plant_url.make_url(plant_renderer.render_package(
+            self.name, plant_renderer.render_object_diagram(self.state, self.model, self.meta_model)))
+        )
+
+    def render_graphviz_object_diagram(self):
+        print(graphviz_url.make_url(graphviz_renderer.render_object_diagram(self.state, self.model, self.meta_model)))

+ 68 - 0
examples/ftg_pm_pt/help_functions.py

@@ -0,0 +1,68 @@
+import copy
+import pickle
+
+from api.od import ODAPI
+
+from examples.ftg_pm_pt.helpers.composite_activity import execute_composite_workflow
+
+def serialize(obj):
+    return pickle.dumps(obj)
+
+
+def deserialize(obj):
+    return pickle.loads(obj)
+
+
+def create_activity_links(od: ODAPI, activity, prev_element, ctrl_port, end_trace=None,
+                          relation_type="pt_IsFollowedBy"):
+    od.create_link(None, "pt_RelatesTo", activity, ctrl_port)
+    od.create_link(None, relation_type, prev_element, activity)
+    if end_trace:
+        od.create_link(None, "pt_IsFollowedBy", activity, end_trace)
+
+
+def extract_input_data(od: ODAPI, activity):
+    input_data = {}
+    for has_data_in in od.get_outgoing(activity, "pm_HasDataIn"):
+        data_port = od.get_target(has_data_in)
+        artefact_state = od.get_source(od.get_incoming(od.get_source(od.get_incoming(data_port, "pm_DataFlowOut")[0]), "pm_Of")[0])
+        input_data[od.get_name(data_port)] = deserialize(od.get_slot_value(artefact_state, "data"))
+    return input_data
+
+
+def execute_activity(od: ODAPI, globs, activity, input_data):
+    inp = copy.deepcopy(input_data) # Necessary, otherwise the function changes the values inside the dictionary -> need the original values for process trace
+    func = globs[od.get_slot_value(activity, "func")]
+    return func(inp) if func.__code__.co_argcount > 0 else func()
+
+
+def handle_artefact(od: ODAPI, activity, artefact_type, relation_type, data_port=None, data=None,
+                    direction="DataFlowIn"):
+    artefact = od.create_object(None, "pt_Artefact")
+    if 'pt_Consumes' == relation_type:
+        od.create_link(None, relation_type, artefact, activity)
+    else:
+        od.create_link(None, relation_type, activity, artefact)
+    if data_port:
+        flow_direction = od.get_incoming if relation_type == 'pt_Consumes' else od.get_outgoing
+        ass_side = od.get_source if relation_type == 'pt_Consumes' else od.get_target
+        pm_artefact = ass_side(flow_direction(data_port, f"pm_{direction}")[0])
+        prev_artefact = find_previous_artefact(od, od.get_incoming(pm_artefact, "pt_BelongsTo"))
+        if prev_artefact:
+            od.create_link(None, "pt_PrevVersion", artefact, prev_artefact)
+        od.create_link(None, "pt_BelongsTo", artefact, pm_artefact)
+        if data is not None:
+            artefact_state = od.get_source(od.get_incoming(pm_artefact, "pm_Of")[0])
+            od.set_slot_value(artefact_state, "data", serialize(data))
+            od.set_slot_value(artefact, "data", serialize(data))
+
+
+def find_previous_artefact(od: ODAPI, linked_artefacts):
+    return next((od.get_source(link) for link in linked_artefacts if
+                 not od.get_incoming(od.get_source(link), "pt_PrevVersion")), None)
+
+
+def update_control_states(od: ODAPI, activity, ctrl_out):
+    for has_ctrl_in in od.get_outgoing(activity, "pm_HasCtrlIn"):
+        od.set_slot_value(od.get_source(od.get_incoming(od.get_target(has_ctrl_in), "pm_Of")[0]), "active", False)
+    od.set_slot_value(od.get_source(od.get_incoming(ctrl_out, "pm_Of")[0]), "active", True)

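`serialize`/`deserialize` above wrap pickle so that arbitrary Python values can be stored in the Bytes-typed `data` slots of the runtime metamodel and recovered unchanged. A standalone round-trip example:

    import pickle

    def serialize(obj):
        return pickle.dumps(obj)

    def deserialize(obj):
        return pickle.loads(obj)

    # Arbitrary Python data survives the trip through a Bytes slot unchanged.
    payload = {"tokens": [1, 2, 3], "label": "demo"}
    assert deserialize(serialize(payload)) == payload
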
+ 272 - 0
examples/ftg_pm_pt/helpers/composite_activity.py

@@ -0,0 +1,272 @@
+from uuid import UUID
+
+from api.od import ODAPI
+from examples.ftg_pm_pt.ftg_pm_pt import FtgPmPt
+from examples.ftg_pm_pt.runner import FtgPmPtRunner
+
+
+def find_previous_artefact(od: ODAPI, linked_artefacts):
+    return next((od.get_source(link) for link in linked_artefacts if
+                 not od.get_incoming(od.get_source(link), "pt_PrevVersion")), None)
+
+
+def create_activity_links(od: ODAPI, activity, prev_element, ctrl_port, end_trace=None,
+                          relation_type="pt_IsFollowedBy"):
+    od.create_link(None, "pt_RelatesTo", activity, ctrl_port)
+    od.create_link(None, relation_type, prev_element, activity)
+    if end_trace:
+        od.create_link(None, "pt_IsFollowedBy", activity, end_trace)
+
+
+def get_workflow_path(od: ODAPI, activity: UUID):
+    return od.get_slot_value(activity, "subworkflow_path")
+
+
+def get_workflow(workflow_path: str):
+    with open(workflow_path, "r") as f:
+        return f.read()
+
+
+############################
+
+def get_runtime_state(od: ODAPI, design_obj: UUID):
+    states = od.get_incoming(design_obj, "pm_Of")
+    if len(states) == 0:
+        print(f"Design object '{od.get_name(design_obj)}' has no runtime state.")
+        return None
+    return od.get_source(states[0])
+
+
+def get_source_incoming(od: ODAPI, obj: UUID, link_name: str):
+    links = od.get_incoming(obj, link_name)
+    if len(links) == 0:
+        print(f"Object '{od.get_name(obj)}' has no incoming links of type '{link_name}'.")
+        return None
+    return od.get_source(links[0])
+
+
+def get_target_outgoing(od: ODAPI, obj: UUID, link_name: str):
+    links = od.get_outgoing(obj, link_name)
+    if len(links) == 0:
+        print(f"Object '{od.get_name(obj)}' has no outgoing links of type '{link_name}'.")
+        return None
+    return od.get_target(links[0])
+
+
+def set_control_port_value(od: ODAPI, port: UUID, value: bool):
+    state = get_runtime_state(od, port)
+    od.set_slot_value(state, "active", value)
+
+
+def set_artefact_data(od: ODAPI, artefact: UUID, value: bytes):
+    state = artefact
+    # Only the process model of the artefact contains a runtime state
+    if od.get_type_name(state) == "pm_Artefact":
+        state = get_runtime_state(od, artefact)
+    od.set_slot_value(state, "data", value)
+
+
+def get_artefact_data(od: ODAPI, artefact):
+    state = artefact
+    # Only the process model of the artefact contains a runtime state
+    if od.get_type_name(state) == "pm_Artefact":
+        state = get_runtime_state(od, artefact)
+    return od.get_slot_value(state, "data")
+
+
+############################
+
+def set_workflow_control_source(workflow_model: FtgPmPt, ctrl_port_name: str, composite_linkage: dict):
+    od = workflow_model.odapi
+    source_port_name = composite_linkage[ctrl_port_name]
+    source_port = od.get(source_port_name)
+    set_control_port_value(od, source_port, True)
+
+
+def set_workflow_artefacts(act_od: ODAPI, activity: UUID, workflow_model: FtgPmPt, composite_linkage: dict):
+    for data_port in [act_od.get_target(data_in) for data_in in act_od.get_outgoing(activity, "pm_HasDataIn")]:
+        # Get the data source port of the inner workflow
+        data_port_name = act_od.get_name(data_port)
+        source_port_name = composite_linkage[data_port_name]
+        source_port = workflow_model.odapi.get(source_port_name)
+
+        # Get the artefact that is linked to the data port of the activity
+        act_artefact = get_source_incoming(act_od, data_port, "pm_DataFlowOut")
+        # Get the data of the artefact
+        artefact_data = get_artefact_data(act_od, act_artefact)
+
+        # Get the artefact that is linked to the data port of the inner workflow
+        workflow_artefact = get_target_outgoing(workflow_model.odapi, source_port, "pm_DataFlowIn")
+        set_artefact_data(workflow_model.odapi, workflow_artefact, artefact_data)
+
+
+def get_activity_port_from_inner_port(composite_linkage: dict, port_name: str):
+    for act_port_name, work_port_name in composite_linkage.items():
+        if work_port_name == port_name:
+            return act_port_name
+
+
+def execute_composite_workflow(od: ODAPI, activity: UUID, ctrl_port: UUID, composite_linkage: dict,
+                               packages: dict | None, matched=None):
+    activity_name = od.get_slot_value(activity, "name")
+
+    # First get the path of the object diagram file that contains the inner workflow of the activity
+    workflow_path = get_workflow_path(od, activity)
+
+    # Read the object diagram file
+    workflow = get_workflow(workflow_path)
+
+    # Create an FtgPmPt object
+    workflow_model = FtgPmPt(activity_name)
+
+    # Load the workflow to the object
+    workflow_model.load_model(workflow)
+
+    # Set the correct control source port of the workflow to active
+    set_workflow_control_source(workflow_model, od.get_name(ctrl_port), composite_linkage[activity_name])
+
+    # If a data port is linked, set the data of the artefact
+    set_workflow_artefacts(od, activity, workflow_model, composite_linkage[activity_name])
+
+    # Create an FtgPmPtRunner object with the FtgPmPt object
+    workflow_runner = FtgPmPtRunner(workflow_model)
+
+    # Set the packages if present
+    workflow_runner.set_packages(packages, is_path=False)
+
+    # Run the FtgPmPtRunner (is a subprocess necessary? This makes it more complicated because now we have direct access to the object)
+    workflow_runner.run()
+
+    # Collect all the ports of the inner workflow and map them back to the activity ports,
+    # so we can set the correct control ports to active and also set the data artefacts correctly
+    ports = extract_inner_workflow(workflow_model.odapi)
+    start_act = None
+    end_act = None
+    for port in [port for port in ports if port]:
+        port_name = workflow_model.odapi.get_name(port)
+        activity_port_name = get_activity_port_from_inner_port(composite_linkage[activity_name], port_name)
+        activity_port = od.get(activity_port_name)
+        match workflow_model.odapi.get_type_name(port):
+            case "pm_CtrlSource":
+                start_act = handle_control_source(od, activity_port, matched("prev_trace_element"))
+            case "pm_CtrlSink":
+                end_act = handle_control_sink(od, activity_port, start_act, matched("end_trace"))
+            case "pm_DataSource":
+                handle_data_source(od, activity_port, start_act)
+            case "pm_DataSink":
+                handle_data_sink(od, workflow_model.odapi, activity_port, port, end_act)
+
+
+def handle_control_source(od: ODAPI, port, prev_trace_elem):
+    set_control_port_value(od, port, False)
+    start_activity = od.create_object(None, "pt_StartActivity")
+    create_activity_links(od, start_activity, prev_trace_elem, port)
+    return start_activity
+
+
+def handle_control_sink(od: ODAPI, port, start_act, end_trace):
+    set_control_port_value(od, port, True)
+    end_activity = od.create_object(None, "pt_EndActivity")
+    create_activity_links(od, end_activity, start_act, port, end_trace)
+    return end_activity
+
+
+def handle_data_source(od: ODAPI, port, start_activity):
+    pt_artefact = od.create_object(None, "pt_Artefact")
+    od.create_link(None, "pt_Consumes", pt_artefact, start_activity)
+
+    pm_artefact = get_source_incoming(od, port, "pm_DataFlowOut")
+    pm_artefact_data = get_artefact_data(od, pm_artefact)
+    set_artefact_data(od, pt_artefact, pm_artefact_data)
+    prev_pt_artefact = find_previous_artefact(od, od.get_incoming(pm_artefact, "pt_BelongsTo"))
+    if prev_pt_artefact:
+        od.create_link(None, "pt_PrevVersion", pt_artefact, prev_pt_artefact)
+    od.create_link(None, "pt_BelongsTo", pt_artefact, pm_artefact)
+
+
+def handle_data_sink(act_od: ODAPI, work_od: ODAPI, act_port, work_port, end_activity):
+    pt_artefact = act_od.create_object(None, "pt_Artefact")
+    act_od.create_link(None, "pt_Produces", end_activity, pt_artefact)
+
+    work_artefact = get_source_incoming(work_od, work_port, "pm_DataFlowOut")
+    work_artefact_data = get_artefact_data(work_od, work_artefact)
+
+    act_artefact = get_target_outgoing(act_od, act_port, "pm_DataFlowIn")
+
+    set_artefact_data(act_od, act_artefact, work_artefact_data)
+    set_artefact_data(act_od, pt_artefact, work_artefact_data)
+
+    prev_pt_artefact = find_previous_artefact(act_od, act_od.get_incoming(act_artefact, "pt_BelongsTo"))
+    if prev_pt_artefact:
+        act_od.create_link(None, "pt_PrevVersion", pt_artefact, prev_pt_artefact)
+    act_od.create_link(None, "pt_BelongsTo", pt_artefact, act_artefact)
+
+
+def extract_inner_workflow(workflow: ODAPI):
+    # Get the model; there should be only one
+    name, model = workflow.get_all_instances("pm_Model")[0]
+
+    # Get the start of the process trace
+    start_trace = get_source_incoming(workflow, model, "pt_Starts")
+    # Get the end of the process trace
+    end_trace = get_source_incoming(workflow, model, "pt_Ends")
+
+    # Get the first started activity
+    first_activity = get_target_outgoing(workflow, start_trace, "pt_IsFollowedBy")
+    # Get the last ended activity
+    end_activity = get_source_incoming(workflow, end_trace, "pt_IsFollowedBy")
+
+    # Get the control port that started the activity
+    act_ctrl_in = get_target_outgoing(workflow, first_activity, "pt_RelatesTo")
+    # Get the control port that is activated when the activity is executed
+    act_ctrl_out = get_target_outgoing(workflow, end_activity, "pt_RelatesTo")
+
+    # Get the control source of the workflow
+    ports = []
+    for port in workflow.get_incoming(act_ctrl_in, "pm_CtrlFlow"):
+        source = workflow.get_source(port)
+        if workflow.get_type_name(source) == "pm_CtrlSource":
+            # Only one port can activate an activity
+            ports.append(source)
+            break
+
+    # Get the control sink of the workflow
+    for port in workflow.get_outgoing(act_ctrl_out, "pm_CtrlFlow"):
+        sink = workflow.get_target(port)
+        if workflow.get_type_name(sink) == "pm_CtrlSink":
+            # Only one port can be set to active once an activity is ended
+            ports.append(sink)
+            break
+
+    # Get the data port that the activity consumes (if used)
+    consumed_links = workflow.get_incoming(first_activity, "pt_Consumes")
+    if len(consumed_links) > 0:
+        pt_artefact = None
+        for link in consumed_links:
+            pt_artefact = workflow.get_source(link)
+            # Check if it is the first artefact -> contains no previous version
+            if len(workflow.get_outgoing(pt_artefact, "pt_PrevVersion")) == 0:
+                break
+
+        pm_artefact = get_target_outgoing(workflow, pt_artefact, "pt_BelongsTo")
+        # Find the data source port
+        for link in workflow.get_incoming(pm_artefact, "pm_DataFlowIn"):
+            source = workflow.get_source(link)
+            if workflow.get_type_name(source) == "pm_DataSource":
+                # An activity can only use one artefact as input
+                ports.append(source)
+                break
+
+    # Get all data ports that are connected to an artefact that is produced by an activity in the workflow,
+    # where the artefact is also part of the main workflow
+    for port_name, data_sink in workflow.get_all_instances("pm_DataSink"):
+        pm_art = get_source_incoming(workflow, data_sink, "pm_DataFlowOut")
+        # If the pm_artefact is linked to a process trace artefact that is produced, we can add the port
+        links = workflow.get_incoming(pm_art, "pt_BelongsTo")
+        if not len(links):
+            continue
+        # A data sink port linkage will only be added to the process trace when an activity is ended and so an artefact
+        # is produced, meaning that if a belongsTo link exists, a process trace artefact is linked to this data port
+        ports.append(data_sink)
+
+    return ports

+ 2 - 0
examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_lhs.od

@@ -0,0 +1,2 @@
+# Match the model
+model:RAM_pm_Model

+ 7 - 0
examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_nac.od

@@ -0,0 +1,7 @@
+model:RAM_pm_Model
+
+# Check if the model isn't already connected to a process trace
+start_trace:RAM_pt_StartTrace
+  :RAM_pt_Starts (start_trace -> model)
+end_trace:RAM_pt_EndTrace
+  :RAM_pt_Ends (end_trace -> model)

+ 12 - 0
examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_rhs.od

@@ -0,0 +1,12 @@
+# Keep the left hand side
+model:RAM_pm_Model
+
+# Connect a process trace to it
+start_trace:RAM_pt_StartTrace
+    starts:RAM_pt_Starts (start_trace -> model)
+
+end_trace:RAM_pt_EndTrace
+    ends:RAM_pt_Ends (end_trace -> model)
+
+# Connect the start with the end
+:RAM_pt_IsFollowedBy (start_trace -> end_trace)

+ 49 - 0
examples/ftg_pm_pt/operational_semantics/r_exec_activity_lhs.od

@@ -0,0 +1,49 @@
+# When a control port is active and is connected to an activity, we want to execute the activity.
+# But if the activity has input_and (input_or = False), it can only be activated if all its inputs are active.
+
+
+# Match the model
+model:RAM_pm_Model
+
+# Match a Python automated activity
+py_activity:RAM_pm_PythonAutomatedActivity {
+    # Check if all connected ports are active in case of input_and
+    condition = ```
+        all_active = True
+        
+        # Check for or / and
+        if not get_slot_value(this, "input_or"):
+            # Get all the ctrl in ports
+            for has_ctrl_in in get_outgoing(this, "pm_HasCtrlIn"):
+                c_in_state = get_source(get_incoming(get_target(has_ctrl_in), "pm_Of")[0])
+                # Check if the port is active or not
+                if not get_slot_value(c_in_state, "active"):
+                    all_active = False
+                    break
+
+        all_active
+    ```;
+} model_to_activity:RAM_pm_Owns (model -> py_activity)
+
+
+# Match a control activity in port that is active
+ctrl_in:RAM_pm_CtrlActivityIn
+
+ctrl_in_state:RAM_pm_CtrlPortState {
+    RAM_active = `get_value(this)`;
+}
+
+state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
+
+# Match the activity link to the port
+activity_to_port:RAM_pm_HasCtrlIn (py_activity -> ctrl_in)
+
+# Match the end of the trace
+end_trace:RAM_pt_EndTrace
+ends:RAM_pt_Ends (end_trace -> model)
+
+# Match the previous trace element before the end trace
+prev_trace_element:RAM_pt_Event
+
+followed_by:RAM_pt_IsFollowedBy (prev_trace_element -> end_trace)
+

+ 42 - 0
examples/ftg_pm_pt/operational_semantics/r_exec_activity_rhs.od

@@ -0,0 +1,42 @@
+model:RAM_pm_Model
+
+py_activity:RAM_pm_PythonAutomatedActivity {
+
+    condition = ```
+        start_activity = create_object(None, "pt_StartActivity")
+        create_activity_links(odapi, start_activity, matched("prev_trace_element"), matched("ctrl_in"))
+        input_data = extract_input_data(odapi, this)
+        result = execute_activity(odapi, globals()["packages"], this, input_data)
+        if len(result) == 3:
+            status_code, output_data, input_used = result
+        else:
+            status_code, output_data, input_used = *result, None
+        if input_used:
+            handle_artefact(odapi, start_activity, "pt_Artefact", "pt_Consumes", get(input_used), input_data[input_used], direction="DataFlowOut")
+        end_activity = create_object(None, "pt_EndActivity")
+        ctrl_out = get(status_code)
+        create_activity_links(odapi, end_activity, start_activity, ctrl_out, end_trace=matched("end_trace"))
+        if output_data:
+            port, data = output_data
+            handle_artefact(odapi, end_activity, "pt_Artefact", "pt_Produces", get(port), data, direction="DataFlowIn")
+        update_control_states(odapi, this, ctrl_out)
+    ```;
+}
+
+model_to_activity:RAM_pm_Owns
+
+ctrl_in:RAM_pm_CtrlActivityIn
+
+ctrl_in_state:RAM_pm_CtrlPortState {
+    RAM_active = `False`;
+}
+
+state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
+
+activity_to_port:RAM_pm_HasCtrlIn (py_activity -> ctrl_in)
+
+end_trace:RAM_pt_EndTrace
+ends:RAM_pt_Ends (end_trace -> model)
+
+prev_trace_element:RAM_pt_Event
+

+ 36 - 0
examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_lhs.od

@@ -0,0 +1,36 @@
+# When a control port is active and is connected to an activity, we want to execute the activity. If it is a composite one, we execute its inner workflow.
+# But if the activity has input_and (input_or = False), it can only be activated if all its inputs are active.
+
+
+# Match the model
+model:RAM_pm_Model
+
+# Match a composite activity
+activity:RAM_pm_Activity {
+
+    RAM_composite = `True`;
+
+} model_to_activity:RAM_pm_Owns (model -> activity)
+
+
+# Match a control activity in port that is active
+ctrl_in:RAM_pm_CtrlActivityIn
+
+ctrl_in_state:RAM_pm_CtrlPortState {
+    RAM_active = `get_value(this)`;
+}
+
+state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
+
+# Match the activity link to the port
+activity_to_port:RAM_pm_HasCtrlIn (activity -> ctrl_in)
+
+# Match the end of the trace
+end_trace:RAM_pt_EndTrace
+ends:RAM_pt_Ends (end_trace -> model)
+
+# Match the previous trace element before the end trace
+prev_trace_element:RAM_pt_Event
+
+followed_by:RAM_pt_IsFollowedBy (prev_trace_element -> end_trace)
+

+ 29 - 0
examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_rhs.od

@@ -0,0 +1,29 @@
+model:RAM_pm_Model
+
+activity:RAM_pm_Activity {
+
+    RAM_composite = `True`;
+
+    condition = ```
+        # Execute inner workflow
+        execute_composite_workflow(odapi, this, matched("ctrl_in"), globals()["composite_linkage"], globals()["packages"], matched)
+    ```;
+}
+
+model_to_activity:RAM_pm_Owns
+
+ctrl_in:RAM_pm_CtrlActivityIn
+
+ctrl_in_state:RAM_pm_CtrlPortState {
+    RAM_active = `False`;
+}
+
+state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
+
+activity_to_port:RAM_pm_HasCtrlIn (activity -> ctrl_in)
+
+end_trace:RAM_pt_EndTrace
+ends:RAM_pt_Ends (end_trace -> model)
+
+prev_trace_element:RAM_pt_Event
+

+ 20 - 0
examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_lhs.od

@@ -0,0 +1,20 @@
+# Match an active control output port
+out_state:RAM_pm_CtrlPortState {
+    RAM_active = `get_value(this)`;
+}
+
+out:RAM_pm_CtrlOut
+
+state_to_out:RAM_pm_Of (out_state -> out)
+
+# Match an inactive control input port
+in_state:RAM_pm_CtrlPortState {
+    RAM_active = `not get_value(this)`;
+}
+
+in:RAM_pm_CtrlIn
+
+state_to_in:RAM_pm_Of (in_state -> in)
+
+# Match the connection between those two ports
+flow:RAM_pm_CtrlFlow (out -> in)

+ 42 - 0
examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_rhs.od

@@ -0,0 +1,42 @@
+# Copy the left hand side
+
+out_state:RAM_pm_CtrlPortState {
+    # Only set the output port to inactive if all connected input ports are set to active
+    RAM_active = ```
+        set_to_active = False
+
+        output_port = matched("out")
+        outgoing_flows = get_outgoing(output_port, "pm_CtrlFlow")
+
+        # for each flow: pm_CtrlFlow -> pm_CtrlIn <- pm_Of <- pm_CtrlPortState == state 
+        all_input_port_states = [get_source(get_incoming(get_target(flow), "pm_Of")[0]) for flow in outgoing_flows]
+        input_port_state = matched("in_state")
+
+        for state in all_input_port_states:
+            is_active = get_slot_value(state, "active")
+
+            # If the state is not active and it is not the input port state we have matched and planned to set active
+            # Then we can't yet set this output port state to inactive
+            if not is_active and state != input_port_state:
+                set_to_active = True
+                break
+          
+        # Set the attribute to the assigned value
+        set_to_active
+    ```;
+}
+
+out:RAM_pm_CtrlOut
+
+state_to_out:RAM_pm_Of (out_state -> out)
+
+in_state:RAM_pm_CtrlPortState {
+    # Set the input port active
+    RAM_active = `True`;
+}
+
+in:RAM_pm_CtrlIn
+
+state_to_in:RAM_pm_Of (in_state -> in)
+
+flow:RAM_pm_CtrlFlow (out -> in)

+ 200 - 0
examples/ftg_pm_pt/pm/metamodels/mm_design.od

@@ -0,0 +1,200 @@
+##################################################
+
+pm_Model:Class
+
+##################################################
+
+pm_Stateful:Class
+
+##################################################
+
+pm_ModelElement:Class {
+  abstract = True;
+}
+
+##################################################
+
+pm_Activity:Class 
+  :Inheritance (pm_Activity -> pm_ModelElement)
+
+pm_Activity_name:AttributeLink (pm_Activity -> String) {
+  name = "name";
+  optional = False;
+}
+
+pm_Activity_composite:AttributeLink (pm_Activity -> Boolean) {
+  name = "composite";
+  optional = False;
+}
+
+pm_Activity_subworkflow_path:AttributeLink (pm_Activity -> String) {
+	name = "subworkflow_path";
+	optional = True;
+}
+
+
+pm_AutomatedActivity:Class {
+  abstract = True;
+} :Inheritance (pm_AutomatedActivity -> pm_Activity)
+
+pm_AutomatedActivity_input_or:AttributeLink (pm_AutomatedActivity -> Boolean) {
+  name = "input_or";
+  optional = False;
+}
+
+pm_PythonAutomatedActivity:Class 
+  :Inheritance (pm_PythonAutomatedActivity -> pm_AutomatedActivity)
+
+pm_PythonAutomatedActivity_func:AttributeLink (pm_PythonAutomatedActivity -> ActionCode) {
+  name = "func";
+  optional = False;
+}
+
+##################################################
+
+pm_Artefact:Class 
+  :Inheritance (pm_Artefact -> pm_ModelElement)
+  :Inheritance (pm_Artefact -> pm_Stateful)
+
+##################################################
+
+pm_CtrlPort:Class {
+  abstract = True;
+} :Inheritance (pm_CtrlPort -> pm_Stateful)
+
+pm_CtrlIn:Class {
+  abstract = True;
+} :Inheritance (pm_CtrlIn -> pm_CtrlPort)
+
+pm_CtrlSink:Class {
+    # 1) A control sink port must have at least one incoming control flow
+    # 2) A control sink port can't have any control flow output
+    constraint = ```
+        has_incoming = len(get_incoming(this, "pm_CtrlFlow")) > 0
+        no_outgoing = len(get_outgoing(this, "pm_CtrlFlow")) == 0
+
+        # Return constraint
+        has_incoming and no_outgoing
+    ```;
+} :Inheritance (pm_CtrlSink -> pm_CtrlIn)
+
+pm_CtrlActivityIn:Class {
+    # 1) Must have at least one incoming control flow
+    constraint = ```
+        has_incoming = len(get_incoming(this, "pm_CtrlFlow")) > 0
+        # Return constraint
+        has_incoming
+    ```;
+} :Inheritance (pm_CtrlActivityIn -> pm_CtrlIn)
+
+pm_CtrlOut:Class {
+  abstract = True;
+} :Inheritance (pm_CtrlOut -> pm_CtrlPort)
+
+pm_CtrlSource:Class {
+    # 1) A control source port can't have any control flow inputs
+    # 2) A control source port must have at least one outgoing control flow
+    constraint = ```
+        no_incoming = len(get_incoming(this, "pm_CtrlFlow")) == 0
+        has_outgoing = len(get_outgoing(this, "pm_CtrlFlow")) > 0
+
+        # Return constraint
+        no_incoming and has_outgoing
+    ```;
+} :Inheritance (pm_CtrlSource -> pm_CtrlOut)
+
+pm_CtrlActivityOut:Class {
+    # 1) Must have at least one outgoing control flow
+    constraint = ```
+        has_outgoing = len(get_outgoing(this, "pm_CtrlFlow")) > 0
+
+        # Return constraint
+        has_outgoing
+    ```;
+} :Inheritance (pm_CtrlActivityOut -> pm_CtrlOut)
+
+##################################################
+
+pm_DataPort:Class {
+  abstract = True;
+}
+
+pm_DataIn:Class {
+  abstract = True;
+} :Inheritance (pm_DataIn -> pm_DataPort)
+
+pm_DataSink:Class
+  :Inheritance (pm_DataSink -> pm_DataIn)
+
+pm_DataActivityIn:Class
+  :Inheritance (pm_DataActivityIn -> pm_DataIn)
+
+pm_DataOut:Class {
+  abstract = True;
+} :Inheritance (pm_DataOut -> pm_DataPort)
+
+pm_DataSource:Class
+  :Inheritance (pm_DataSource -> pm_DataOut)
+
+pm_DataActivityOut:Class
+  :Inheritance (pm_DataActivityOut -> pm_DataOut)
+
+##################################################
+##################################################
+
+pm_Owns:Association (pm_Model -> pm_ModelElement) {
+  source_lower_cardinality = 1;
+  source_upper_cardinality = 1;
+}
+
+##################################################
+
+pm_CtrlFlow:Association (pm_CtrlPort -> pm_CtrlPort)
+
+##################################################
+
+pm_HasCtrlIn:Association (pm_Activity -> pm_CtrlIn) {
+    source_upper_cardinality = 1;
+    target_lower_cardinality = 1;
+}
+
+pm_HasCtrlOut:Association (pm_Activity -> pm_CtrlOut) {
+    source_upper_cardinality = 1;
+    target_lower_cardinality = 1;
+}
+
+pm_HasDataIn:Association (pm_Activity -> pm_DataIn) {
+    source_upper_cardinality = 1;
+}
+
+pm_HasDataOut:Association (pm_Activity -> pm_DataOut) {
+    source_upper_cardinality = 1;
+}
+
+##################################################
+
+pm_DataFlowIn:Association (pm_DataOut -> pm_Artefact) {
+    source_lower_cardinality = 1;
+    target_lower_cardinality = 1;
+}
+
+pm_DataFlowOut:Association (pm_Artefact -> pm_DataIn) {
+    source_lower_cardinality = 1;
+    target_lower_cardinality = 1;
+}
+
+##################################################
+##################################################
+
+has_source_and_sink:GlobalConstraint {
+    # There should be at least one source and sink control port
+    constraint = ```
+        contains_source = len(get_all_instances("pm_CtrlSource")) > 0
+        contains_sink = len(get_all_instances("pm_CtrlSink")) > 0
+
+        # return constraint      
+        contains_source and contains_sink
+    ```;
+}
+
+##################################################

+ 38 - 0
examples/ftg_pm_pt/pm/metamodels/mm_runtime.od

@@ -0,0 +1,38 @@
+##################################################
+
+pm_State:Class {
+  abstract = True;
+}
+
+##################################################
+
+pm_ArtefactState:Class 
+  :Inheritance (pm_ArtefactState -> pm_State)
+
+pm_ArtefactState_data:AttributeLink (pm_ArtefactState -> Bytes) {
+    name = "data";
+    optional = False;
+}
+
+##################################################
+
+pm_CtrlPortState:Class 
+  :Inheritance (pm_CtrlPortState -> pm_State)
+
+pm_CtrlPortState_active:AttributeLink (pm_CtrlPortState -> Boolean) {
+    name = "active";
+    optional = False;
+}
+
+##################################################
+##################################################
+
+pm_Of:Association (pm_State -> pm_Stateful) {
+  # one-to-one
+  source_lower_cardinality = 1;
+  source_upper_cardinality = 1;
+  target_lower_cardinality = 1;
+  target_upper_cardinality = 1;
+}
+
+##################################################

+ 109 - 0
examples/ftg_pm_pt/pt/metamodels/mm_design.od

@@ -0,0 +1,109 @@
+##################################################
+
+pt_Event:Class {
+  abstract = True;
+}
+
+##################################################
+
+pt_Activity:Class {
+  abstract = True;
+} :Inheritance (pt_Activity -> pt_Event)
+
+pt_StartActivity:Class {
+  # A start activity can only be related to a control in port
+  constraint = ```
+      correct_related = True
+
+      port = get_target(get_outgoing(this, "pt_RelatesTo")[0])
+      correct_related = port in [uid for _, uid in get_all_instances("pm_CtrlIn")]
+      correct_related
+  ```;
+
+} :Inheritance (pt_StartActivity -> pt_Activity)
+
+pt_EndActivity:Class {
+  # An end activity can only be related to a control out port
+  constraint = ```
+    correct_related = True
+
+    port = get_target(get_outgoing(this, "pt_RelatesTo")[0])
+    correct_related = port in [uid for _, uid in get_all_instances("pm_CtrlOut")]
+
+    correct_related
+  ```;
+
+} :Inheritance (pt_EndActivity -> pt_Activity)
+
+##################################################
+
+pt_StartTrace:Class
+  :Inheritance (pt_StartTrace -> pt_Event)
+
+pt_EndTrace:Class
+  :Inheritance (pt_EndTrace -> pt_Event)
+
+##################################################
+
+pt_Artefact:Class
+  :Inheritance (pt_Artefact -> pt_Event)
+
+pt_Artefact_data:AttributeLink (pt_Artefact -> Bytes) {
+  name = "data";
+  optional = False;
+}
+
+##################################################
+##################################################
+
+pt_IsFollowedBy:Association (pt_Event -> pt_Event) {
+  source_upper_cardinality = 1;
+  target_upper_cardinality = 1;
+}
+
+##################################################
+
+pt_RelatesTo:Association (pt_Activity -> pm_CtrlPort) {
+  source_upper_cardinality = 1;
+  target_lower_cardinality = 1;
+  target_upper_cardinality = 1;
+}
+
+pt_Consumes:Association (pt_Artefact -> pt_StartActivity) {
+  source_upper_cardinality = 1;
+  target_lower_cardinality = 1;
+  target_upper_cardinality = 1;
+}
+
+pt_Produces:Association (pt_EndActivity -> pt_Artefact) {
+  source_lower_cardinality = 1;
+  source_upper_cardinality = 1;
+  target_upper_cardinality = 1;
+}
+
+##################################################
+
+pt_Starts:Association (pt_StartTrace -> pm_Model) {
+  source_upper_cardinality = 1;
+  target_lower_cardinality = 1;
+  target_upper_cardinality = 1;
+}
+
+pt_Ends:Association (pt_EndTrace -> pm_Model) {
+  source_upper_cardinality = 1;
+  target_lower_cardinality = 1;
+  target_upper_cardinality = 1;
+}
+##################################################
+
+pt_PrevVersion:Association (pt_Artefact -> pt_Artefact) {
+  source_upper_cardinality = 1;
+  target_upper_cardinality = 1;
+}
+
+pt_BelongsTo:Association (pt_Artefact -> pm_Artefact) {
+  target_lower_cardinality = 1;
+  target_upper_cardinality = 1;
+}
+
+##################################################

+ 162 - 0
examples/ftg_pm_pt/runner.py

@@ -0,0 +1,162 @@
+import re
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+from transformation.rule import RuleMatcherRewriter
+from transformation.ramify import ramify
+from concrete_syntax.graphviz import renderer as graphviz
+from concrete_syntax.graphviz.make_url import make_url
+from concrete_syntax.plantuml import renderer as plantuml
+from concrete_syntax.plantuml.make_url import make_url as plant_make_url
+from api.od import ODAPI
+import os
+from os import listdir
+from os.path import isfile, join
+import importlib.util
+from util.module_to_dict import module_to_dict
+from examples.ftg_pm_pt import help_functions
+
+from examples.ftg_pm_pt.ftg_pm_pt import FtgPmPt
+
+
+
+class FtgPmPtRunner:
+
+    def __init__(self, model: FtgPmPt, composite_linkage: dict | None = None):
+        self.model = model
+        self.ram_mm = ramify(self.model.state, self.model.meta_model)
+        self.rules = self.load_rules()
+        self.packages = None
+        self.composite_linkage = composite_linkage
+
+    def load_rules(self):
+        return loader.load_rules(
+            self.model.state,
+            lambda rule_name, kind: os.path.join(
+                os.path.dirname(__file__),
+                f"operational_semantics/r_{rule_name}_{kind}.od"
+            ),
+            self.ram_mm,
+            ["connect_process_trace", "trigger_ctrl_flow", "exec_activity", "exec_composite_activity"]
+        )
+
+    def set_packages(self, packages: str | dict, is_path: bool):
+        if not is_path:
+            self.packages = packages
+            return
+
+        self.packages = self.parse_packages(packages)
+
+    def parse_packages(self, packages_path: str) -> dict:
+        return self.collect_functions_from_packages(packages_path, packages_path)
+
+    def collect_functions_from_packages(self, base_path, current_path):
+        functions_dict = {}
+
+        for entry in listdir(current_path):
+            entry_path = join(current_path, entry)
+
+            if isfile(entry_path) and entry.endswith(".py"):
+                module_name = self.convert_path_to_module_name(base_path, entry_path)
+                module = self.load_module_from_file(entry_path)
+
+                for func_name, func in module_to_dict(module).items():
+                    functions_dict[f"{module_name}.{func_name}"] = func
+
+            elif not isfile(entry_path):
+                nested_functions = self.collect_functions_from_packages(base_path, entry_path)
+                functions_dict.update(nested_functions)
+
+        return functions_dict
+
+    @staticmethod
+    def convert_path_to_module_name(base_path, file_path):
+        return file_path.replace(base_path, "").replace(".py", "").replace("/", "")
+
+    @staticmethod
+    def load_module_from_file(file_path):
+        spec = importlib.util.spec_from_file_location("", file_path)
+        module = importlib.util.module_from_spec(spec)
+        spec.loader.exec_module(module)
+        return module
+
+    def create_matcher(self):
+        packages = module_to_dict(help_functions)
+
+        if self.packages:
+            packages.update({ "packages": self.packages })
+
+        if self.composite_linkage:
+            packages.update({ "composite_linkage": self.composite_linkage })
+
+        matcher_rewriter = RuleMatcherRewriter(
+            self.model.state, self.model.meta_model, self.ram_mm, eval_context=packages
+        )
+        return matcher_rewriter
+
+    def visualize_model(self):
+        print(make_url(graphviz.render_object_diagram(self.model.state, self.model.model, self.model.meta_model)))
+        print(plant_make_url(plantuml.render_object_diagram(self.model.state, self.model.model, self.model.meta_model)))
+
+    @staticmethod
+    def __extract_artefact_info(od, pt_art):
+        """Extract artefact metadata and data."""
+        data = od.get_slot_value(pt_art, "data")
+        pm_art = od.get_name(od.get_target(od.get_outgoing(pt_art, "pt_BelongsTo")[0]))
+        has_prev_version = bool(od.get_outgoing(pt_art, "pt_PrevVersion"))
+        is_last_version = not od.get_incoming(pt_art, "pt_PrevVersion")
+        return {
+            "Artefact Name": pm_art,
+            "Data": data,
+            "Has previous version": has_prev_version,
+            "Is last version": is_last_version
+        }
+
+    def __extract_inputs(self, od, event_node):
+        """Extract all consumed artefacts for an event."""
+        return [
+            self.__extract_artefact_info(od, od.get_source(consumes))
+            for consumes in od.get_incoming(event_node, "pt_Consumes")
+        ]
+
+    def __extract_outputs(self, od, event_node):
+        """Extract all produced artefacts for an event."""
+        return [
+            self.__extract_artefact_info(od, od.get_target(produces))
+            for produces in od.get_outgoing(event_node, "pt_Produces")
+        ]
+
+    @staticmethod
+    def to_snake_case(experiment_type):
+        # Finds uppercase letters that are not at the start of the string.
+        # Example: AtomicExperiment -> atomic_experiment
+        return re.sub(r'(?<!^)(?=[A-Z])', '_', experiment_type).lower()
+
+    def run(self, debug_flag: bool = False):
+        matcher = self.create_matcher()
+
+        rule_performed = True
+        while rule_performed:
+
+            # Loop over all the rules in priority order
+            for i, (rule_name, rule) in enumerate(self.rules.items()):
+                rule_performed = False
+
+                result = matcher.exec_on_first_match(
+                    self.model.model, rule, rule_name, in_place=True
+                )
+
+                # If the rule cannot be executed go to the next rule
+                if not result:
+                    continue
+
+                rule_performed = True
+                self.model.model, lhs_match, _ = result
+
+                if debug_flag:
+                    print(f"Match: {lhs_match}")
+                    self.visualize_model()
+
+                # If a rule is performed, break and start looping over the rules from the beginning
+                break

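`FtgPmPtRunner.run` schedules the rules as a priority-ordered fixpoint loop: try the rules in order, and after every successful application start again from the highest-priority rule, until no rule matches. A minimal standalone sketch of that pattern, with a hypothetical `try_rule` callback standing in for `exec_on_first_match`:

    def run_to_fixpoint(rule_names, try_rule):
        rule_performed = True
        while rule_performed:
            rule_performed = False
            for rule_name in rule_names:      # rules are tried in priority order
                if try_rule(rule_name):       # True if the rule matched and was applied
                    rule_performed = True
                    break                     # restart from the highest-priority rule

    # Toy usage: "decrement" keeps firing while the counter is positive.
    counter = {"n": 3}

    def try_rule(name):
        if name == "decrement" and counter["n"] > 0:
            counter["n"] -= 1
            return True
        return False

    run_to_fixpoint(["decrement"], try_rule)
    assert counter["n"] == 0
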
+ 66 - 0
examples/petrinet/models/schedule.od

@@ -0,0 +1,66 @@
+start:Start
+end:End
+
+transitions:Match{
+    file = "operational_semantics/transition";
+}
+
+
+d:Data_modify
+{
+    modify_dict = '
+    {
+    "tr": "t"
+    }';
+}
+
+nac_input_without:Match{
+    file = "operational_semantics/all_input_have_token";
+    n = "1";
+}
+
+inputs:Match{
+    file = "operational_semantics/all_inputs";
+}
+
+rewrite_incoming:Rewrite
+{
+    file = "operational_semantics/remove_incoming";
+}
+
+loop_trans:Loop
+loop_input:Loop
+
+p:Print
+{
+event = True;
+label = "transition: ";
+}
+
+p2:Print
+{
+event = True;
+label = "inputs: ";
+}
+
+:Exec_con(start -> transitions){gate_from = 0;gate_to = 0;}
+:Exec_con(transitions -> end){gate_from = 1;gate_to = 0;}
+:Exec_con(transitions -> loop_trans){gate_from = 0;gate_to = 0;}
+:Exec_con(loop_trans -> nac_input_without){gate_from = 0;gate_to = 0;}
+
+[//]: # (:Exec_con&#40;nac_input_without -> loop_trans&#41;{gate_from = 0;gate_to = 0;})
+:Exec_con(nac_input_without -> inputs){gate_from = 1;gate_to = 0;}
+:Exec_con(inputs -> loop_input){gate_from = 0;gate_to = 0;}
+:Exec_con(inputs -> loop_trans){gate_from = 1;gate_to = 0;}
+
+:Exec_con(loop_trans -> end){gate_from = 1;gate_to = 0;}
+
+:Data_con(transitions -> loop_trans)
+:Data_con(nac_input_without -> p)
+:Data_con(d -> nac_input_without)
+:Data_con(loop_trans -> d)
+:Data_con(loop_trans -> rewrite_incoming)
+
+
+
+

+ 13 - 0
examples/petrinet/operational_semantics/all_input_have_token.od

@@ -0,0 +1,13 @@
+# A place with no tokens:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+  RAM_numTokens = `get_value(this) == 0`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)

+ 13 - 0
examples/petrinet/operational_semantics/all_inputs.od

@@ -0,0 +1,13 @@
+# An input place (any number of tokens):
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+  RAM_numTokens = `True`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)

+ 13 - 0
examples/petrinet/operational_semantics/all_output_places.od

@@ -0,0 +1,13 @@
+# An output place (any number of tokens):
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+  RAM_numTokens = `True`;
+}
+:RAM_pn_of (ps -> p)
+
+# An outgoing arc from our transition to that place:
+
+t:RAM_PNTransition
+
+:RAM_arc (t -> p)

+ 13 - 0
examples/petrinet/operational_semantics/all_output_places_update.od

@@ -0,0 +1,13 @@
+# An output place; its token count gets incremented:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+  RAM_numTokens = `set_value(this, get_value(this) + 1)`;
+}
+:RAM_pn_of (ps -> p)
+
+# An outgoing arc from our transition to that place:
+
+t:RAM_PNTransition
+
+:RAM_arc (t -> p)

+ 0 - 0
examples/petrinet/operational_semantics/delete_all.od


+ 1 - 1
examples/petrinet/operational_semantics/r_fire_transition_lhs.od

@@ -1 +1 @@
-t:RAM_PNTransition
+t:RAM_PNTransition

+ 1 - 0
examples/petrinet/operational_semantics/transition.od

@@ -0,0 +1 @@
+tr:RAM_PNTransition

+ 15 - 5
examples/petrinet/runner.py

@@ -1,3 +1,4 @@
+from examples.schedule.RuleExecuter import RuleExecuter
 from state.devstate import DevState
 from api.od import ODAPI
 from concrete_syntax.textual_od.renderer import render_od
@@ -9,6 +10,10 @@ from transformation.ramify import ramify
 from examples.semantics.operational import simulator
 from examples.petrinet.renderer import show_petri_net
 
+from examples.schedule.ScheduledActionGenerator import *
+from examples.schedule.RuleExecuter import *
+
+
 
 if __name__ == "__main__":
     import os
@@ -46,20 +51,25 @@ if __name__ == "__main__":
         mm_rt_ramified,
         ["fire_transition"]) # only 1 rule :(
 
-    matcher_rewriter = RuleMatcherRewriter(state, mm_rt, mm_rt_ramified)
-    action_generator = ActionGenerator(matcher_rewriter, rules)
+    # matcher_rewriter = RuleMatcherRewriter(state, mm_rt, mm_rt_ramified)
+    # action_generator = ActionGenerator(matcher_rewriter, rules)
+
+    matcher_rewriter2 = RuleExecuter(state, mm_rt, mm_rt_ramified)
+    action_generator = ScheduleActionGenerator(matcher_rewriter2, f"models/schedule.od")
 
     def render_callback(od):
         show_petri_net(od)
         # return render_od(state, od.m, od.mm)
         return render_od_jinja2(state, od.m, od.mm)
 
-    sim = simulator.Simulator(
+    action_generator.generate_dot()
+
+    sim = simulator.MinimalSimulator(
         action_generator=action_generator,
         decision_maker=simulator.InteractiveDecisionMaker(auto_proceed=False),
         # decision_maker=simulator.RandomDecisionMaker(seed=0),
-        renderer=render_callback,
+        termination_condition=action_generator.termination_condition,
         # renderer=lambda od: render_od(state, od.m, od.mm),
     )
 
-    sim.run(ODAPI(state, m_rt_initial, mm_rt))
+    sim.run(ODAPI(state, m_rt_initial, mm_rt))

+ 49 - 0
examples/schedule/RuleExecuter.py

@@ -0,0 +1,49 @@
+from concrete_syntax.textual_od.renderer import render_od
+
+import pprint
+from typing import Generator, Callable, Any
+from uuid import UUID
+import functools
+
+from api.od import ODAPI
+from concrete_syntax.common import indent
+from transformation.matcher import match_od
+from transformation.rewriter import rewrite
+from transformation.cloner import clone_od
+from util.timer import Timer
+from util.loader import parse_and_check
+
+class RuleExecuter:
+    def __init__(self, state, mm: UUID, mm_ramified: UUID, eval_context={}):
+        self.state = state
+        self.mm = mm
+        self.mm_ramified = mm_ramified
+        self.eval_context = eval_context
+
+    # Generates matches.
+    # Every match is a dictionary with entries LHS_element_name -> model_element_name
+    def match_rule(self, m: UUID, lhs: UUID, *, pivot:dict[Any, Any]):
+        lhs_matcher = match_od(self.state,
+                               host_m=m,
+                               host_mm=self.mm,
+                               pattern_m=lhs,
+                               pattern_mm=self.mm_ramified,
+                               eval_context=self.eval_context,
+                               pivot= pivot,
+                               )
+        return lhs_matcher
+
+    def rewrite_rule(self, m: UUID, rhs: UUID, *, pivot:dict[Any, Any]):
+        yield rewrite(self.state,
+                rhs_m=rhs,
+                pattern_mm=self.mm_ramified,
+                lhs_match=pivot,
+                host_m=m,
+                host_mm=self.mm,
+                eval_context=self.eval_context,
+            )
+
+
+    def load_match(self, file: str):
+        with open(file, "r") as f:
+            return parse_and_check(self.state, f.read(), self.mm_ramified, file)
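A minimal usage sketch of the RuleExecuter class above (illustrative only; `state`, `mm_rt`, `mm_rt_ramified` and `m_rt_initial` are assumed to be set up as in examples/petrinet/runner.py, and the pattern path assumes the petrinet example directory as working directory):

    from examples.schedule.RuleExecuter import RuleExecuter

    executer = RuleExecuter(state, mm_rt, mm_rt_ramified)            # host MM and its RAMified MM

    # Load a pattern once, then lazily enumerate its matches in the host model.
    lhs = executer.load_match("operational_semantics/transition.od")
    for match in executer.match_rule(m_rt_initial, lhs, pivot={}):
        # 'match' maps pattern element names (e.g. "tr") to host element names;
        # it can be fed back as a pivot to grow the match or to drive rewrite_rule.
        print(match)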

+ 104 - 0
examples/schedule/ScheduledActionGenerator.py

@@ -0,0 +1,104 @@
+import importlib.util
+import io
+import os
+
+from jinja2 import FileSystemLoader, Environment
+
+from concrete_syntax.textual_od import parser as parser_od
+from concrete_syntax.textual_cd import parser as parser_cd
+from api.od import ODAPI
+from bootstrap.scd import bootstrap_scd
+from examples.schedule.generator import schedule_generator
+from examples.schedule.schedule_lib import End, NullNode
+from framework.conformance import Conformance, render_conformance_check_result
+from state.devstate import DevState
+
+
+class ScheduleActionGenerator:
+    def __init__(self, rule_executer, schedulefile:str):
+        self.rule_executer = rule_executer
+        self.rule_dict = {}
+        self.schedule: "Schedule"
+
+
+        self.state = DevState()
+        self.load_schedule(schedulefile)
+
+    def load_schedule(self, filename):
+        print("Loading schedule ...")
+        scd_mmm = bootstrap_scd(self.state)
+        with open("../schedule/models/scheduling_MM.od", "r") as f_MM:
+            mm_cs = f_MM.read()
+        with open(f"{filename}", "r") as f_M:
+            m_cs = f_M.read()
+        print("OK")
+
+        print("\nParsing models")
+
+        print(f"\tParsing meta model")
+        scheduling_mm = parser_cd.parse_cd(
+            self.state,
+            m_text=mm_cs,
+        )
+        print(f"\tParsing '{filename}' model")
+        scheduling_m = parser_od.parse_od(
+            self.state,
+            m_text=m_cs,
+            mm=scheduling_mm
+        )
+        print(f"OK")
+
+        print("\tIs the meta-meta-model a valid class diagram?")
+        conf = Conformance(self.state, scd_mmm, scd_mmm)
+        print(render_conformance_check_result(conf.check_nominal()))
+        print(f"Is our '{filename}' model a valid instance of the scheduling meta-model?")
+        conf = Conformance(self.state, scheduling_m, scheduling_mm)
+        print(render_conformance_check_result(conf.check_nominal()))
+        print("OK")
+
+        od = ODAPI(self.state, scheduling_m, scheduling_mm)
+        g = schedule_generator(od)
+
+        output_buffer = io.StringIO()
+        g.generate_schedule(output_buffer)
+        with open("schedule.py", "w") as f_out: f_out.write(output_buffer.getvalue())
+        spec = importlib.util.spec_from_file_location("schedule", "schedule.py")
+        schedule_module = importlib.util.module_from_spec(spec)
+        spec.loader.exec_module(schedule_module)
+        self.schedule = schedule_module.Schedule(self.rule_executer)
+        self.load_matchers()
+
+    def load_matchers(self):
+        matchers = dict()
+        for file in self.schedule.get_matchers():
+            matchers[file] = self.rule_executer.load_match(file)
+        self.schedule.init_schedule(matchers)
+
+    def __call__(self, api: ODAPI):
+        exec_op = self.schedule(api)
+        yield from exec_op
+
+    def termination_condition(self, api: ODAPI):
+        if type(self.schedule.cur) == End:
+            return "schedule reached an End node"
+        if type(self.schedule.cur) == NullNode:
+            return "schedule stuck in a NullNode (dead end in the control flow)"
+        return None
+
+    def generate_dot(self):
+        env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates')))
+        env.trim_blocks = True
+        env.lstrip_blocks = True
+        template_dot = env.get_template('schedule_dot.j2')
+
+        nodes = []
+        edges = []
+        visit = set()
+        self.schedule.generate_dot(nodes, edges, visit)
+        print("Nodes:")
+        print(nodes)
+        print("\nEdges:")
+        print(edges)
+
+        with open("test.dot", "w") as f_dot:
+            f_dot.write(template_dot.render({"nodes": nodes, "edges": edges}))

+ 0 - 0
examples/schedule/__init__.py


+ 129 - 0
examples/schedule/generator.py

@@ -0,0 +1,129 @@
+import sys
+import os
+import json
+from uuid import UUID
+
+from jinja2.runtime import Macro
+
+from api.od import ODAPI
+from jinja2 import Environment, FileSystemLoader, meta
+
+
+class schedule_generator:
+    def __init__(self, odApi:ODAPI):
+        self.env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates')))
+        self.env.trim_blocks = True
+        self.env.lstrip_blocks = True
+        self.template = self.env.get_template('schedule_template.j2')
+        self.template_wrap = self.env.get_template('schedule_template_wrap.j2')
+        self.api = odApi
+
+        def get_slot_value_default(item: UUID, slot:str, default):
+            if slot in self.api.get_slots(item):
+                return self.api.get_slot_value(item, slot)
+            return default
+
+        name_dict = lambda item: {"name": self.api.get_name(item)}
+        conn_dict = lambda item: {"name_from": self.api.get_name(self.api.get_source(item)),
+                                  "name_to": self.api.get_name(self.api.get_target(item)),
+                                  "gate_from": self.api.get_slot_value(item, "gate_from"),
+                                  "gate_to": self.api.get_slot_value(item, "gate_to"),
+                                  }
+
+        conn_data_event = {"Match": lambda item: False,
+                           "Rewrite": lambda item: False,
+                           "Data_modify": lambda item: True,
+                           "Loop": lambda item: True,
+                           "Print": lambda item: get_slot_value_default(item, "event", False)
+                           }
+        conn_data_dict = lambda item: {"name_from": self.api.get_name(self.api.get_source(item)),
+                                  "name_to": self.api.get_name(self.api.get_target(item)),
+                                  "event": conn_data_event[self.api.get_type_name(target := self.api.get_target(item))](target)
+                                  }
+        rewrite_dict = lambda item: {"name": self.api.get_name(item),
+                                  "file": self.api.get_slot_value(item, "file"),
+                                  }
+        match_dict = lambda item: {"name": self.api.get_name(item),
+                                  "file": self.api.get_slot_value(item, "file"),
+                                  "n": self.api.get_slot_value(item, "n") \
+                                        if "n" in self.api.get_slots(item) else 'float("inf")'
+                                  }
+        data_modify_dict = lambda item: {"name": self.api.get_name(item),
+                                  "dict": json.loads(self.api.get_slot_value(item, "modify_dict"))
+                                  }
+        loop_dict = lambda item: {"name": self.api.get_name(item),
+                                  "choise": get_slot_value_default(item, "choise", False)}
+        print_dict = lambda item: {"name": self.api.get_name(item),
+                                   "label": get_slot_value_default(item, "label", "")}
+        arg_map = {"Start": name_dict, "End": name_dict,
+                   "Match": match_dict, "Rewrite": rewrite_dict,
+                   "Data_modify": data_modify_dict, "Loop": loop_dict,
+                   "Exec_con": conn_dict, "Data_con": conn_data_dict,
+                   "Print": print_dict}
+        self.macro_args = {tp: (macro, arg_map.get(tp)) for tp, macro in self.template.module.__dict__.items()
+                                                if type(macro) == Macro}
+
+    def _render(self, item):
+        type_name = self.api.get_type_name(item)
+        macro, arg_gen = self.macro_args[type_name]
+        return macro(**arg_gen(item))
+
+    def generate_schedule(self, stream = sys.stdout):
+        start = self.api.get_all_instances("Start")[0][1]
+        stack = [start]
+        out = {"blocks":[], "exec_conn":[], "data_conn":[], "match_files":set(), "matchers":[], "start":self.api.get_name(start)}
+        execBlocks = set()
+        exec_conn = list()
+
+        while len(stack) > 0:
+            exec_obj = stack.pop()
+            if exec_obj in execBlocks:
+                continue
+            execBlocks.add(exec_obj)
+            for conn in self.api.get_outgoing(exec_obj, "Exec_con"):
+                exec_conn.append(conn)
+                stack.append(self.api.get_target(conn))
+
+        stack = list(execBlocks)
+        data_blocks = set()
+        for name, p in self.api.get_all_instances("Print"):
+            if "event" in (event := self.api.get_slots(p)) and event:
+                stack.append(p)
+                execBlocks.add(p)
+
+
+        data_conn = set()
+        while len(stack) > 0:
+            obj = stack.pop()
+            for data_c in self.api.get_incoming(obj, "Data_con"):
+                data_conn.add(data_c)
+                source = self.api.get_source(data_c)
+                if not self.api.is_instance(source, "Exec") and \
+                        source not in execBlocks and \
+                        source not in data_blocks:
+                    stack.append(source)
+                    data_blocks.add(source)
+
+        for exec_item in execBlocks:
+            out["blocks"].append(self._render(exec_item))
+            if self.api.is_instance(exec_item, "Rule"):
+                d = self.macro_args[self.api.get_type_name(exec_item)][1](exec_item)
+                out["match_files"].add(d["file"])
+                out["matchers"].append(d)
+        for exec_c in exec_conn:
+            out["exec_conn"].append(self._render(exec_c))
+
+        for data_c in data_conn:
+            out["data_conn"].append(self._render(data_c))
+
+        for data_b in data_blocks:
+            out["blocks"].append(self._render(data_b))
+
+        print(self.template_wrap.render(out), file=stream)
+
+
+
+
+
+        # print("with open('test.dot', 'w') as f:", file=stream)
+        # print(f"\tf.write({self.api.get_name(start)}.generate_dot())", file=stream)

+ 26 - 0
examples/schedule/models/README.md

@@ -0,0 +1,26 @@
+
+### association Exec_con
+    Integer gate_from;
+    Integer gate_to;
+
+### association Data_con
+
+### class Start [1..1]
+### class End [1..*]
+
+
+### class Match
+    optional Integer n;
+
+### class Rewrite
+
+### class Data_modify
+    String modify_dict;
+
+### class Loop
+    optional Boolean choise;
+
+## debugging tools
+
+### class Print(In_Exec, Out_Exec, In_Data)
+    optional Boolean event;

+ 46 - 0
examples/schedule/models/scheduling_MM.od

@@ -0,0 +1,46 @@
+abstract class Exec
+abstract class In_Exec(Exec)
+abstract class Out_Exec(Exec)
+
+association Exec_con  [0..*] Out_Exec -> In_Exec [0..*] {
+    Integer gate_from;
+    Integer gate_to;
+}
+
+abstract class Data
+abstract class In_Data(Data)
+abstract class Out_Data(Data)
+association Data_con  [0..*] Out_Data -> In_Data [0..*]
+
+class Start [1..1] (Out_Exec)
+class End [1..*] (In_Exec)
+
+
+abstract class Rule (In_Exec, Out_Exec, In_Data, Out_Data)
+{
+    String file;
+}
+class Match (Rule)
+{
+    optional Integer n;
+}
+
+class Rewrite (Rule)
+
+class Data_modify(In_Data, Out_Data)
+{
+    String modify_dict;
+}
+
+class Loop(In_Exec, Out_Exec, In_Data, Out_Data)
+{
+    optional Boolean choise;
+}
+
+# debugging tools
+
+class Print(In_Exec, Out_Exec, In_Data) 
+{
+    optional Boolean event;
+    optional String label;
+}

+ 12 - 0
examples/schedule/schedule_lib/__init__.py

@@ -0,0 +1,12 @@
+from .data_node import DataNode
+from .data_modify import DataModify
+from .end import End
+from .exec_node import ExecNode
+from .loop import Loop
+from .match import Match
+from .null_node import NullNode
+from .print import Print
+from .rewrite import Rewrite
+from .start import Start
+
+__all__ = ["DataNode", "End", "ExecNode", "Loop", "Match", "NullNode", "Rewrite", "Print", "DataModify", "Start"]

+ 63 - 0
examples/schedule/schedule_lib/data.py

@@ -0,0 +1,63 @@
+import functools
+from typing import Any, Generator, Callable
+
+
+class Data:
+    def __init__(self, super) -> None:
+        self.data: list[dict[Any, Any]] = list()
+        self.success: bool = False
+        self.super = super
+
+    @staticmethod
+    def store_output(func: Callable) -> Callable:
+        def wrapper(self, *args, **kwargs) -> Any:
+            output = func(self, *args, **kwargs)
+            self.success = output
+            return output
+        return wrapper
+
+    @store_output
+    def store_data(self, data_gen: Generator, n: int) -> bool:
+        self.data.clear()
+        if n == 0:
+            return True
+        i: int = 0
+        while (match := next(data_gen, None)) is not None:
+            self.data.append(match)
+            i+=1
+            if i >= n:
+                break
+        else:
+            if n == float("inf"):
+                return bool(len(self.data))
+            self.data.clear()
+            return False
+        return True
+
+    def get_super(self) -> int:
+        return self.super
+
+    def replace(self, data: "Data") -> None:
+        self.data.clear()
+        self.data.extend(data.data)
+
+    def append(self, data: Any) -> None:
+        self.data.append(data)
+
+    def clear(self) -> None:
+        self.data.clear()
+
+    def pop(self, index = -1) -> Any:
+        return self.data.pop(index)
+
+    def empty(self) -> bool:
+        return len(self.data) == 0
+
+    def __getitem__(self, index):
+        return self.data[index]
+
+    def __iter__(self):
+        return self.data.__iter__()
+
+    def __len__(self):
+        return self.data.__len__()

+ 26 - 0
examples/schedule/schedule_lib/data_modify.py

@@ -0,0 +1,26 @@
+import functools
+from typing import TYPE_CHECKING, Callable, List
+
+from api.od import ODAPI
+from examples.schedule.RuleExecuter import RuleExecuter
+from .exec_node import ExecNode
+from .data_node import DataNode
+
+
+class DataModify(DataNode):
+    def __init__(self, modify_dict: dict[str,str]) -> None:
+        DataNode.__init__(self)
+        self.modify_dict: dict[str,str] = modify_dict
+
+    def input_event(self, success: bool) -> None:
+        if success or self.data_out.success:
+            self.data_out.data.clear()
+            for data in self.data_in.data:
+                self.data_out.append({self.modify_dict[key]: value for key, value in data.items() if key in self.modify_dict.keys()})
+            DataNode.input_event(self, success)
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        if self.id in visited:
+            return
+        nodes.append(f"{self.id}[label=modify]")
+        super().generate_dot(nodes, edges, visited)

+ 47 - 0
examples/schedule/schedule_lib/data_node.py

@@ -0,0 +1,47 @@
+from typing import Any, Generator, List
+
+from examples.schedule.schedule_lib.id_generator import IdGenerator
+from .data import Data
+
+class DataNode:
+    def __init__(self) -> None:
+        if not hasattr(self, 'id'):
+            self.id = IdGenerator().generate_id()
+        self.data_out : Data = Data(self)
+        self.data_in: Data | None = None
+        self.eventsub: list[DataNode] = list()
+
+    def connect_data(self, data_node: "DataNode", eventsub=True) -> None:
+        data_node.data_in = self.data_out
+        if eventsub:
+            self.eventsub.append(data_node)
+
+    def store_data(self, data_gen: Generator, n: int) -> None:
+        success: bool = self.data_out.store_data(data_gen, n)
+        for sub in self.eventsub:
+            sub.input_event(success)
+
+    def get_input_data(self) -> list[dict[Any, Any]]:
+        if not self.data_in.success:
+            raise Exception("Invalid input data: matching has failed")
+        data = self.data_in.data
+        if len(data) == 0:
+            raise Exception("Invalid input data: no data present")
+        return data
+
+    def input_event(self, success: bool) -> None:
+        self.data_out.success = success
+        for sub in self.eventsub:
+            sub.input_event(success)
+
+    def get_id(self) -> int:
+        return self.id
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        visited.add(self.id)
+        if self.data_in is not None:
+            edges.append(f"{self.data_in.get_super().get_id()} -> {self.get_id()} [color = green]")
+            self.data_in.get_super().generate_dot(nodes, edges, visited)
+        for sub in self.eventsub:
+            sub.generate_dot(nodes, edges, visited)
+

+ 21 - 0
examples/schedule/schedule_lib/end.py

@@ -0,0 +1,21 @@
+import functools
+from typing import TYPE_CHECKING, List, Callable, Generator
+
+from api.od import ODAPI
+from .exec_node import ExecNode
+
+class End(ExecNode):
+    def __init__(self) -> None:
+        super().__init__(out_connections=1)
+
+    def execute(self, od: ODAPI) -> Generator | None:
+        return self.terminate(od)
+
+    @staticmethod
+    def terminate(od: ODAPI) -> Generator:
+        yield f"end:", functools.partial(lambda od:(od, ""), od)
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        if self.id in visited:
+            return
+        nodes.append(f"{self.id}[label=end]")

+ 34 - 0
examples/schedule/schedule_lib/exec_node.py

@@ -0,0 +1,34 @@
+from typing import TYPE_CHECKING, List, Callable, Generator
+from api.od import ODAPI
+
+from .id_generator import IdGenerator
+
+class ExecNode:
+    def __init__(self, out_connections: int = 1) -> None:
+        from .null_node import NullNode
+        self.next_state: list[ExecNode] = []
+        if out_connections > 0:
+            self.next_state = [NullNode()]*out_connections
+        self.id: int = IdGenerator().generate_id()
+
+    def nextState(self) -> "ExecNode":
+        return self.next_state[0]
+
+    def connect(self, next_state: "ExecNode", from_gate: int = 0, to_gate: int = 0) -> None:
+        if from_gate >= len(self.next_state):
+            raise IndexError
+        self.next_state[from_gate] = next_state
+
+    def execute(self, od: ODAPI) -> Generator | None:
+        return None
+
+    def get_id(self) -> int:
+        return self.id
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        visited.add(self.id)
+        for edge in self.next_state:
+            edges.append(f"{self.id} -> {edge.get_id()}")
+        for next in self.next_state:
+            next.generate_dot(nodes, edges, visited)
+

+ 10 - 0
examples/schedule/schedule_lib/funcs.py

@@ -0,0 +1,10 @@
+from typing import Callable
+
+def generate_dot_wrap(func) -> Callable:
+    def wrapper(self, *args, **kwargs) -> str:
+        nodes = []
+        edges = []
+        self.reset_visited()
+        func(self, nodes, edges, *args, **kwargs)
+        return f"digraph G {{\n\t{"\n\t".join(nodes)}\n\t{"\n\t".join(edges)}\n}}"
+    return wrapper

+ 8 - 0
examples/schedule/schedule_lib/id_generator.py

@@ -0,0 +1,8 @@
+from .singleton import Singleton
+
+class IdGenerator(metaclass=Singleton):
+    def __init__(self):
+        self.id = -1
+    def generate_id(self) -> int:
+        self.id += 1
+        return self.id

+ 57 - 0
examples/schedule/schedule_lib/loop.py

@@ -0,0 +1,57 @@
+import functools
+from random import choice
+from typing import TYPE_CHECKING, Callable, List, Generator
+
+from api.od import ODAPI
+from examples.schedule.RuleExecuter import RuleExecuter
+from .exec_node import ExecNode
+from .data_node import DataNode
+from .data_node import Data
+
+
+class Loop(ExecNode, DataNode):
+    def __init__(self, choice) -> None:
+        ExecNode.__init__(self, out_connections=2)
+        DataNode.__init__(self)
+        self.choice: bool = choice
+        self.cur_data: Data = Data(-1)
+
+    def nextState(self) -> ExecNode:
+        return self.next_state[not self.data_out.success]
+
+    def execute(self, od: ODAPI) -> Generator | None:
+        if self.cur_data.empty():
+            self.data_out.clear()
+            self.data_out.success = False
+            DataNode.input_event(self, False)
+            return None
+
+        if self.choice:
+            def select_data() -> Generator:
+                for i in range(len(self.cur_data)):
+                    yield f"choice: {self.cur_data[i]}", functools.partial(self.select_next,od, i)
+            return select_data()
+        else:
+            self.select_next(od, -1)
+        return None
+
+    def input_event(self, success: bool) -> None:
+        if (b := self.data_out.success) or success:
+            self.cur_data.replace(self.data_in)
+            self.data_out.clear()
+            self.data_out.success = False
+            if b:
+                DataNode.input_event(self, False)
+
+    def select_next(self,od: ODAPI, index: int) -> tuple[ODAPI, list[str]]:
+        self.data_out.clear()
+        self.data_out.append(self.cur_data.pop(index))
+        DataNode.input_event(self, True)
+        return (od, ["data selected"])
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        if self.id in visited:
+            return
+        nodes.append(f"{self.id}[label=Loop]")
+        ExecNode.generate_dot(self, nodes, edges, visited)
+        DataNode.generate_dot(self, nodes, edges, visited)

+ 42 - 0
examples/schedule/schedule_lib/match.py

@@ -0,0 +1,42 @@
+import functools
+from typing import TYPE_CHECKING, Callable, List, Generator
+
+from api.od import ODAPI
+from examples.schedule.RuleExecuter import RuleExecuter
+from .exec_node import ExecNode
+from .data_node import DataNode
+
+
+class Match(ExecNode, DataNode):
+    def __init__(self, label: str, n: int | float) -> None:
+        ExecNode.__init__(self, out_connections=2)
+        DataNode.__init__(self)
+        self.label: str = label
+        self.n:int = n
+        self.rule = None
+        self.rule_executer : RuleExecuter
+
+    def nextState(self) -> ExecNode:
+        return self.next_state[not self.data_out.success]
+
+    def execute(self, od: ODAPI) -> Generator | None:
+        self.match(od)
+        return None
+
+    def init_rule(self, rule, rule_executer):
+        self.rule = rule
+        self.rule_executer = rule_executer
+
+    def match(self, od: ODAPI) -> None:
+        pivot = {}
+        if self.data_in is not None:
+            pivot = self.get_input_data()[0]
+        print(f"matching: {self.label}\n\tpivot: {pivot}")
+        self.store_data(self.rule_executer.match_rule(od.m, self.rule, pivot=pivot), self.n)
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        if self.id in visited:
+            return
+        nodes.append(f"{self.id}[label=M_{self.label.split("/")[-1]}_{self.n}]")
+        ExecNode.generate_dot(self, nodes, edges, visited)
+        DataNode.generate_dot(self, nodes, edges, visited)

+ 25 - 0
examples/schedule/schedule_lib/null_node.py

@@ -0,0 +1,25 @@
+import functools
+
+from typing import List, Callable, Generator
+
+from api.od import ODAPI
+from .singleton import Singleton
+
+from .exec_node import ExecNode
+
+class NullNode(ExecNode, metaclass=Singleton):
+    def __init__(self):
+        ExecNode.__init__(self, out_connections=0)
+
+    def execute(self, od: ODAPI) -> Generator | None:
+        raise Exception('Null node should already have terminated the schedule')
+
+    @staticmethod
+    def terminate(od: ODAPI):
+        return None
+        yield  # important: this unreachable yield makes terminate() a generator; do not remove it
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        if self.id in visited:
+            return
+        nodes.append(f"{self.id}[label=Null]")

+ 28 - 0
examples/schedule/schedule_lib/print.py

@@ -0,0 +1,28 @@
+import functools
+from typing import TYPE_CHECKING, Callable, List, Generator
+
+from api.od import ODAPI
+from examples.schedule.RuleExecuter import RuleExecuter
+from .exec_node import ExecNode
+from .data_node import DataNode
+
+
+class Print(ExecNode, DataNode):
+    def __init__(self, label: str = "") -> None:
+        ExecNode.__init__(self, out_connections=1)
+        DataNode.__init__(self)
+        self.label = label
+
+    def execute(self, od: ODAPI) -> Generator | None:
+        self.input_event(True)
+        return None
+
+    def input_event(self, success: bool) -> None:
+        print(f"{self.label}{self.data_in.data}")
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        if self.id in visited:
+            return
+        nodes.append(f"{self.id}[label=Print_{self.label.replace(":", "")}]")
+        ExecNode.generate_dot(self, nodes, edges, visited)
+        DataNode.generate_dot(self, nodes, edges, visited)

+ 38 - 0
examples/schedule/schedule_lib/rewrite.py

@@ -0,0 +1,38 @@
+import functools
+from typing import List, Callable, Generator
+
+from api.od import ODAPI
+from .exec_node import ExecNode
+from .data_node import DataNode
+from ..RuleExecuter import RuleExecuter
+
+
+class Rewrite(ExecNode, DataNode):
+    def __init__(self, label: str) -> None:
+        ExecNode.__init__(self, out_connections=1)
+        DataNode.__init__(self)
+        self.label = label
+        self.rule = None
+        self.rule_executer : RuleExecuter
+
+    def init_rule(self, rule, rule_executer):
+        self.rule = rule
+        self.rule_executer= rule_executer
+
+    def execute(self, od: ODAPI) -> Generator | None:
+        yield "ghello", functools.partial(self.rewrite, od)
+
+    def rewrite(self, od):
+        print("rewrite" + self.label)
+        pivot = {}
+        if self.data_in is not None:
+            pivot = self.get_input_data()[0]
+        self.store_data(self.rule_executer.rewrite_rule(od.m, self.rule, pivot=pivot), 1)
+        return ODAPI(od.state, od.m, od.mm),[f"rewrite {self.label}\n\tpivot: {pivot}\n\t{"success" if self.data_out.success else "failure"}\n"]
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        if self.id in visited:
+            return
+        nodes.append(f"{self.id}[label=R_{self.label.split("/")[-1]}]")
+        ExecNode.generate_dot(self, nodes, edges, visited)
+        DataNode.generate_dot(self, nodes, edges, visited)

+ 8 - 0
examples/schedule/schedule_lib/singleton.py

@@ -0,0 +1,8 @@
+from abc import ABCMeta
+
+class Singleton(ABCMeta):
+    _instances = {}
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+        return cls._instances[cls]

+ 16 - 0
examples/schedule/schedule_lib/start.py

@@ -0,0 +1,16 @@
+from typing import TYPE_CHECKING, Callable, List, Any
+
+from .funcs import generate_dot_wrap
+
+from .exec_node import ExecNode
+
+
+class Start(ExecNode):
+    def __init__(self) -> None:
+        ExecNode.__init__(self, out_connections=1)
+
+    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
+        if self.id in visited:
+            return
+        nodes.append(f"{self.id}[label=start]")
+        super().generate_dot(nodes, edges, visited)

+ 9 - 0
examples/schedule/templates/schedule_dot.j2

@@ -0,0 +1,9 @@
+digraph G {
+{% for node in nodes %}
+    {{ node }}
+{% endfor %}
+
+{% for edge in edges %}
+    {{ edge }}
+{% endfor %}
+}

+ 35 - 0
examples/schedule/templates/schedule_template.j2

@@ -0,0 +1,35 @@
+{% macro Start(name) %}
+{{ name }} = Start()
+{%- endmacro %}
+
+{% macro End(name) %}
+{{ name }} = End()
+{%- endmacro %}
+
+{% macro Match(name, file, n) %}
+{{ name }} = Match("{{ file }}", {{ n }})
+{%- endmacro %}
+
+{% macro Rewrite(name, file) %}
+{{ name }} = Rewrite("{{ file }}")
+{%- endmacro %}
+
+{% macro Data_modify(name, dict) %}
+{{ name }} = DataModify({{ dict }})
+{%- endmacro %}
+
+{% macro Exec_con(name_from, name_to, gate_from, gate_to) %}
+{{ name_from }}.connect({{ name_to }},{{ gate_from }},{{ gate_to }})
+{%- endmacro %}
+
+{% macro Data_con(name_from, name_to, event) %}
+{{ name_from }}.connect_data({{ name_to }}, {{ event }})
+{%- endmacro %}
+
+{% macro Loop(name, choise) %}
+{{ name }} = Loop({{ choise }})
+{%- endmacro %}
+
+{% macro Print(name, label) %}
+{{ name }} = Print("{{ label }}")
+{%- endmacro %}

+ 47 - 0
examples/schedule/templates/schedule_template_wrap.j2

@@ -0,0 +1,47 @@
+from examples.schedule.schedule_lib import *
+
+class Schedule:
+    def __init__(self, rule_executer):
+        self.start: Start
+        self.cur: ExecNode = None
+        self.rule_executer = rule_executer
+
+    def __call__(self, od):
+        self.cur = self.cur.nextState()
+        while not isinstance(self.cur, NullNode):
+            action_gen = self.cur.execute(od)
+            if action_gen is not None:
+            # if (action_gen := self.cur.execute(od)) is not None:
+                return action_gen
+            self.cur = self.cur.nextState()
+        return NullNode.terminate(od)
+
+    @staticmethod
+    def get_matchers():
+        return [
+            {% for file in match_files %}
+              "{{ file }}.od",
+            {% endfor %}
+        ]
+
+    def init_schedule(self, matchers):
+    {% for block in blocks%}
+        {{ block }}
+    {% endfor %}
+
+    {% for conn in exec_conn%}
+        {{ conn }}
+    {% endfor %}
+    {% for conn_d in data_conn%}
+        {{ conn_d }}
+    {% endfor %}
+        self.start = {{ start }}
+        self.cur = {{ start }}
+
+    {% for match in matchers %}
+        {{ match["name"] }}.init_rule(matchers["{{ match["file"] }}.od"], self.rule_executer)
+    {% endfor %}
+        return None
+
+    def generate_dot(self, *args, **kwargs):
+        return self.start.generate_dot(*args, **kwargs)
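To make the two templates concrete: for a schedule with one Match and one Rewrite block, the rendered init_schedule body would look roughly like the sketch below (hand-written illustration of the macro output under assumed block names, not actual generator output):

    def init_schedule(self, matchers):
        start = Start()
        end = End()
        transitions = Match("operational_semantics/transition", float("inf"))
        rewrite_incoming = Rewrite("operational_semantics/remove_incoming")

        start.connect(transitions, 0, 0)                    # each Exec_con becomes a .connect(...) call
        transitions.connect(rewrite_incoming, 0, 0)
        rewrite_incoming.connect(end, 0, 0)
        transitions.connect_data(rewrite_incoming, False)   # each Data_con becomes .connect_data(...);
                                                            # the event flag depends on the target's type

        self.start = start
        self.cur = start

        transitions.init_rule(matchers["operational_semantics/transition.od"], self.rule_executer)
        rewrite_incoming.init_rule(matchers["operational_semantics/remove_incoming.od"], self.rule_executer)
        return None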

+ 1 - 1
services/od.py

@@ -148,7 +148,7 @@ class OD:
         actioncode_t.create(value)
         return self.create_model_ref(name, "ActionCode", actioncode_node)
 
-    def create_bytes_value(self, name: str, value: str):
+    def create_bytes_value(self, name: str, value: bytes):
         from services.primitives.bytes_type import Bytes
         bytes_node = self.bottom.create_node()
         bytes_t = Bytes(bytes_node, self.bottom.state)

+ 6 - 2
transformation/rewriter.py

@@ -22,7 +22,6 @@ class TryAgainNextRound(Exception):
 
 # Rewrite is performed in-place (modifying `host_m`)
 def rewrite(state,
-    lhs_m: UUID, # LHS-pattern
     rhs_m: UUID, # RHS-pattern
     pattern_mm: UUID, # meta-model of both patterns (typically the RAMified host_mm)
     lhs_match: dict, # a match, morphism, from lhs_m to host_m (mapping pattern name -> host name), typically found by the 'match_od'-function.
@@ -223,7 +222,12 @@ def rewrite(state,
             result = exec_then_eval(python_expr,
                 _globals=eval_globals,
                 _locals={'this': host_obj}) # 'this' can be used to read the previous value of the slot
-            host_odapi.overwrite_primitive_value(host_obj_name, result, is_code=False)
+            # print("EVAL", common_name, python_expr, "RESULT", result, host_obj_name)
+            try:
+                host_odapi.overwrite_primitive_value(host_obj_name, result, is_code=False)
+            except Exception as e:
+                e.add_note(f"while evaluating attribute {common_name}")
+                raise
         else:
             msg = f"Don't know what to do with element '{common_name}' -> '{host_obj_name}:{host_type}')"
             # print(msg)

+ 0 - 1
transformation/rule.py

@@ -117,7 +117,6 @@ class RuleMatcherRewriter:
 
         try:
             rhs_match = rewrite(self.state,
-                lhs_m=lhs,
                 rhs_m=rhs,
                 pattern_mm=self.mm_ramified,
                 lhs_match=lhs_match,

+ 157 - 0
tutorial/00_metamodeling.py

@@ -0,0 +1,157 @@
+# Before we can create a model in muMLE, we have to create a meta-model.
+
+# Here's an example of a (silly) meta-model.
+# We use a textual concrete syntax:
+
+mm_cs = """
+    # A class named 'A':
+    A:Class
+
+    # A class named 'B':
+    B:Class
+
+    # An association from 'A' to 'B':
+    a2b:Association (A -> B) {
+        # Every 'A' must be associated with at least one 'B'
+        target_lower_cardinality = 1;
+    }
+"""
+
+# Now, we create a model that is an instance of our meta-model:
+
+m_cs = """
+    myA:A
+
+    myB:B
+
+    myLnk:a2b (myA -> myB)
+"""
+
+# Notice that the syntax for meta-model and model is the same: We always declare a named object/link, followed by a colon (:) and the name of the type. The type name refers to the name of an object/link in the meta-model of our model.
+
+
+# So far we've only created text strings in Python. To parse them as models, we first create our 'state', which is a mutable graph that will contain our models and meta-models:
+
+
+from state.devstate import DevState
+
+state = DevState()
+
+
+# Next, we must load the Simple Class Diagrams (SCD) meta-meta-model into our 'state'. The SCD meta-meta-model is a meta-model for our meta-model, and it is also a meta-model for itself.
+
+# The meta-meta-model is not specified in textual syntax because it is typed by itself. In textual syntax, it would contain things like:
+#    Class:Class
+# which is an object typed by itself. The parser cannot handle this (or circular dependencies in general). Therefore, we load the meta-meta-model by mutating the 'state' directly at a very low level:
+
+from bootstrap.scd import bootstrap_scd
+
+print("Loading meta-meta-model...")
+mmm = bootstrap_scd(state)
+print("OK")
+
+# Now that the meta-meta-model has been loaded, we can parse our meta-model:
+
+from concrete_syntax.textual_od import parser
+
+print()
+print("Parsing meta-model...")
+mm = parser.parse_od(
+    state,
+    m_text=mm_cs, # the string of text to parse
+    mm=mmm, # the meta-model of class diagrams (= our meta-meta-model)
+)
+print("OK")
+
+
+# And we can parse our model, the same way:
+
+print()
+print("Parsing model...")
+m = parser.parse_od(
+    state,
+    m_text=m_cs,
+    mm=mm, # this time, the meta-model is the previous model we parsed
+)
+print("OK")
+
+
+# Now we can do a conformance check:
+
+from framework.conformance import Conformance, render_conformance_check_result
+
+print()
+print("Is our model a valid instance of our meta model?")
+conf = Conformance(state, m, mm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# Looks like it is OK!
+
+
+# We can also check if our meta-model is a valid class diagram:
+
+print()
+print("Is our meta-model a valid class diagram?")
+conf = Conformance(state, mm, mmm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# Also good.
+
+
+# Finally, we can even check if the meta-meta-model is a valid instance of itself (it should be):
+
+print()
+print("Is our meta-model a valid class diagram?")
+conf = Conformance(state, mmm, mmm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# All good!
+
+
+# Now let's make things a bit more interesting and introduce non-conformance:
+
+m2_cs = """
+    myA:A
+    myA2:A
+
+    myB:B
+
+    myLnk:a2b (myA -> myB)
+"""
+
+# Parse it:
+
+m2 = parser.parse_od(
+    state,
+    m_text=m2_cs,
+    mm=mm,
+)
+
+# The above model is non-conformant because 'myA2' should have at least one outgoing link of type 'a2b', but it doesn't.
+
+print()
+print("Is model 'm2' a valid instance of our meta-model? (it should not be)")
+conf = Conformance(state, m2, mm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# It should be non-conformant.
+
+
+# Finally, let's render everything as PlantUML:
+
+from concrete_syntax.plantuml import renderer as plantuml
+from concrete_syntax.plantuml.make_url import make_url
+
+uml = (""
+ + plantuml.render_package("Meta-model", plantuml.render_class_diagram(state, mm))
+ + plantuml.render_package("Model", plantuml.render_object_diagram(state, m, mm))
+ + plantuml.render_trace_conformance(state, m, mm)
+ # + plantuml.render_package("Meta-meta-model", plantuml.render_class_diagram(state, mmm))
+ # + plantuml.render_trace_conformance(state, mm, mmm)
+)
+
+print()
+print("PlantUML output:", make_url(uml))
+
+
+# On to the next tutorial...

+ 92 - 0
tutorial/01_constraints.py

@@ -0,0 +1,92 @@
+# We now make our meta-model more interesting by adding a 'price' attribute to B, and constraints to it.
+
+mm_cs = """
+    # class named 'A':
+    A:Class
+
+    # class named 'B':
+    B:Class {
+        constraint = ```
+            # Price must be less than 100
+            get_value(get_slot(this, "price")) < 100 
+        ```;
+    }
+
+    # 'B' has an attribute 'price':
+    B_price:AttributeLink (B -> Integer) {
+        name = "price";
+        optional = False;
+    }
+
+    # An association from 'A' to 'B':
+    a2b:Association (A -> B) {
+        # Every 'A' must be associated with at least one 'B'
+        target_lower_cardinality = 1;
+    }
+
+    totalPriceLessThan500:GlobalConstraint {
+        constraint = ```
+            total_price = 0;
+            for b_name, b_id in get_all_instances("B"):
+                total_price += get_value(get_slot(b_id, "price"))
+            total_price < 500
+        ```;
+    }
+"""
+
+####
+# Note: The name 'B_price' follows a fixed format: <class_name>_<attribute_name>.
+#  This format must be followed!
+####
+
+# We update our model to include a price:
+
+m_cs = """
+    myA:A
+
+    myB:B {
+        price = 1000;
+    }
+
+    myLnk:a2b (myA -> myB)
+"""
+
+
+# And do a conformance check:
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from concrete_syntax.textual_od import parser
+from framework.conformance import Conformance, render_conformance_check_result
+
+state = DevState()
+print("Loading meta-meta-model...")
+mmm = bootstrap_scd(state)
+print("OK")
+
+print()
+print("Parsing meta-model...")
+mm = parser.parse_od(
+    state,
+    m_text=mm_cs, # the string of text to parse
+    mm=mmm, # the meta-model of class diagrams (= our meta-meta-model)
+)
+print("OK")
+
+print()
+print("Parsing model...")
+m = parser.parse_od(
+    state,
+    m_text=m_cs,
+    mm=mm, # this time, the meta-model is the previous model we parsed
+)
+print("OK")
+
+print()
+print("Is our model a valid instance of our meta model?")
+conf = Conformance(state, m, mm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# Can you fix the constraint violation?
+
+

+ 61 - 0
tutorial/02_inheritance.py

@@ -0,0 +1,61 @@
+# The following meta-model has an inheritance relation:
+
+mm_cs = """
+    MyAbstractClass:Class {
+        abstract = True;
+    }
+
+    MyConcreteClass:Class
+
+    :Inheritance (MyConcreteClass -> MyAbstractClass)
+
+    Z:Class
+
+    myZ:Association (MyAbstractClass -> Z) {
+        target_lower_cardinality = 1;
+    }
+
+"""
+
+# Note that we didn't give our inheritance link a name. A unique name will be auto-generated by the parser.
+
+
+# A (non-conforming) instance:
+
+m_nonconform_cs = """
+    cc:MyConcreteClass
+    z:Z
+"""
+
+
+# Check conformance:
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+
+state = DevState()
+mmm = bootstrap_scd(state)
+
+mm = loader.parse_and_check(state, mm_cs, mmm, "mm")
+
+print("should be non-conform:")
+m_nonconform = loader.parse_and_check(state, m_nonconform_cs, mm, "m_nonconform")
+
+
+# The reason for the non-conformance is that all cardinalities and constraints are inherited. Therefore 'MyConcreteClass' must have at least one outgoing 'myZ' link as well.
+
+# We fix the non-conformance by adding this link:
+
+m_conform_cs = m_nonconform_cs + """
+    :myZ (cc -> z)
+"""
+
+# Now everything will be fine
+
+print("should be conform:")
+m_conform = loader.parse_and_check(state, m_conform_cs, mm, "m_conform")
+print("OK")
+
+
+# On to the next tutorial...

+ 71 - 0
tutorial/03_api.py

@@ -0,0 +1,71 @@
+# We reuse our (meta-)model from the previous tutorial. For this tutorial, it doesn't really matter what the models look like.
+
+mm_cs = """
+    MyAbstractClass:Class {
+        abstract = True;
+    }
+
+    MyConcreteClass:Class
+
+    :Inheritance (MyConcreteClass -> MyAbstractClass)
+
+    Z:Class
+
+    myZ:Association (MyAbstractClass -> Z) {
+        target_lower_cardinality = 1;
+    }
+"""
+
+m_cs = """
+    cc:MyConcreteClass
+    z:Z
+    :myZ (cc -> z)
+"""
+
+
+# We parse everything:
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+
+state = DevState()
+mmm = bootstrap_scd(state)
+mm = loader.parse_and_check(state, mm_cs, mmm, "mm")
+m = loader.parse_and_check(state, m_cs, mm, "m")
+
+
+# We can query the model via an API called ODAPI (Object Diagram API):
+
+from api.od import ODAPI
+
+odapi = ODAPI(state, m, mm)
+
+ls = odapi.get_all_instances("MyAbstractClass", include_subtypes=True)
+
+print("result of get_all_instances:")
+print(ls)
+
+# Observing the output above, we see that we got a list of tuples (object_name, UUID).
+# We can also modify the model via the same API:
+
+(cc_name, cc_id) = ls[0]
+z2 = odapi.create_object("z2", "Z")
+odapi.create_link("lnk", "myZ", cc_id, z2)
+
+# And we can observe the modified model:
+
+from concrete_syntax.textual_od.renderer import render_od
+from concrete_syntax.common import indent
+
+print()
+print("the modified model:")
+print(indent(render_od(state, m, mm, hide_names=False), 2))
+
+# BTW, notice that the anonymous link of type 'myZ' from the original model was automatically given a unique name (starting with two underscores).
+
+# The full ODAPI is documented on page 6 of this PDF:
+#   http://msdl.uantwerpen.be/people/hv/teaching/MSBDesign/202425/assignments/assignment6.pdf
+
+
+# On to the next tutorial...

+ 167 - 0
tutorial/04_transformation.py

@@ -0,0 +1,167 @@
+# We now get to the interesting part: model transformation.
+
+# We start with a meta-model and a model, and parse them:
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+from concrete_syntax.textual_od.renderer import render_od
+from concrete_syntax.common import indent
+from concrete_syntax.plantuml import renderer as plantuml
+from concrete_syntax.plantuml.make_url import make_url as make_plantuml_url
+from framework.conformance import Conformance, render_conformance_check_result
+
+mm_cs = """
+    Bear:Class
+    Animal:Class {
+        abstract = True;
+    }
+    Man:Class {
+        lower_cardinality = 1;
+        upper_cardinality = 2;
+    }
+    Man_weight:AttributeLink (Man -> Integer) {
+        name = "weight";
+        optional = False;
+    }
+    afraidOf:Association (Man -> Animal) {
+        # Every Man afraid of at least one Animal
+        target_lower_cardinality = 1;
+    }
+    :Inheritance (Man -> Animal)
+    :Inheritance (Bear -> Animal)
+"""
+
+m_cs = """
+    george:Man {
+        weight = 80;
+    }
+    mrBrown:Bear
+    teddy:Bear
+    :afraidOf (george -> mrBrown)
+    :afraidOf (george -> teddy)
+"""
+
+state = DevState()
+mmm = bootstrap_scd(state)
+mm = loader.parse_and_check(state, mm_cs, mmm, "mm")
+m = loader.parse_and_check(state, m_cs, mm, "m")
+
+
+# We will perform a simple model transformation, where we specify a Left Hand Side (LHS) and Right Hand Side (RHS) pattern. As we will see, both the LHS- and RHS-patterns are models too, and thus we need a meta-model for them. This meta-model can be auto-generated as follows:
+
+from transformation.ramify import ramify
+
+ramified_mm = ramify(state, mm)
+
+# Let's see what it looks like:
+
+print("RAMified meta-model:")
+print(indent(render_od(state, ramified_mm, mmm), 2))
+
+# Note that our RAMified meta-model is also a valid class diagram:
+
+print()
+print("Is valid class diagram?")
+print(render_conformance_check_result(Conformance(state, ramified_mm, mmm).check_nominal()))
+
+# We now specify our patterns.
+# We create a rule that looks for a Man with weight > 60, who is afraid of an animal:
+
+lhs_cs = """
+    # object to match
+    man:RAM_Man {
+        # match only men heavy enough
+        RAM_weight = `get_value(this) > 60`;
+    }
+
+    scaryAnimal:RAM_Animal
+    manAfraidOfAnimal:RAM_afraidOf (man -> scaryAnimal)
+"""
+
+lhs = loader.parse_and_check(state, lhs_cs, ramified_mm, "lhs")
+
+# As you can see, in our pattern-language, the names of the types have been prefixed with 'RAM_'. This is to distinguish them from the original types.
+# Further, the type of the 'weight'-attribute has changed: it used to be Integer, but now it is ActionCode, meaning we can write Python expressions in it. In an LHS-pattern, we write an expression that evaluates to a (Python) boolean. In our example, the expression is evaluated on every Man-object: if the result is True, the object can be matched; otherwise it cannot.
+
+
+# Let's see what happens if we match our LHS-pattern with our model:
+
+from transformation.matcher import match_od
+
+generator = match_od(state, m, mm, lhs, ramified_mm)
+
+# Matching is lazy: 'match_od' returns a generator object, so it will only look for the next match if you ask it to do so. The reason is that sometimes, we're only interested in the first match, whereas producing all the matches can take a lot of time on big models, and the number of matches can also be very big. But our example is small so let's just generate all the matches:
+
+all_matches = list(generator) # generate all matches
+
+import pprint
+
+print()
+print("All matches:\n", pprint.pformat(all_matches))
+
+# A match is just a Python dictionary mapping names of our LHS-pattern to names of our model.
+# There should be 2 matches: 'man' will always be matched with 'george', but 'scaryAnimal' can be matched with either 'mrBrown' or 'teddy'.
+
+
+# So far we've only queried our model. We can modify the model by specifying a RHS-pattern:
+#   Objects/links that occur in RHS but not in LHS are CREATED
+#   Objects/links that occur in LHS but not in RHS are DELETED
+#   Objects/links that occur in both LHS and RHS remain, but we can still UPDATE their attributes.
+
+# Here's a RHS-pattern:
+
+rhs_cs = """
+    man:RAM_Man {
+        # man gains weight
+        RAM_weight = `get_value(this) + 5`;
+    }
+
+    # to create:
+    bill:RAM_Man {
+        RAM_weight = `100`;
+    }
+    billAfraidOfMan:RAM_afraidOf (bill -> man)
+"""
+
+rhs = loader.parse_and_check(state, rhs_cs, ramified_mm, "rhs")
+
+
+# Our RHS-pattern does not contain the objects 'scaryAnimal' or 'manAfraidOfAnimal' of our LHS, so these will be deleted. The objects 'bill' and 'billAfraidOfMan' will be created. The attribute 'weight' of 'man' (matched with 'george' in our example) will be incremented by 5.
+
+# Notice that the weight of the new object 'bill' is the Python-expression `100` (in backticks), not the Integer 100.
+
+# Let's rewrite our model:
+
+from transformation.cloner import clone_od
+from transformation import rewriter
+
+m_rewritten = clone_od(state, m, mm) # copy our model before rewriting (this is optional - we do this so we can later render the model before and after rewrite in a single PlantUML diagram)
+
+lhs_match = all_matches[0] # select one match
+rhs_match = rewriter.rewrite(state, rhs, ramified_mm, lhs_match, m_rewritten, mm)
+
+# Let's render everything as PlantUML:
+
+uml = (""
+    + plantuml.render_package("MM", plantuml.render_class_diagram(state, mm))
+    + plantuml.render_package("RAMified MM", plantuml.render_class_diagram(state, ramified_mm))
+    + plantuml.render_package("LHS", plantuml.render_object_diagram(state, lhs, ramified_mm))
+    + plantuml.render_package("RHS", plantuml.render_object_diagram(state, rhs, ramified_mm))
+    + plantuml.render_package("M (before rewrite)", plantuml.render_object_diagram(state, m, mm))
+    + plantuml.render_package("M (after rewrite)", plantuml.render_object_diagram(state, m_rewritten, mm))
+
+    + plantuml.render_trace_ramifies(state, mm, ramified_mm)
+
+    + plantuml.render_trace_match(state, lhs_match, lhs, m, "orange")
+    + plantuml.render_trace_match(state, rhs_match, rhs, m_rewritten, "red")
+
+    + plantuml.render_trace_conformance(state, lhs, ramified_mm)
+    + plantuml.render_trace_conformance(state, rhs, ramified_mm)
+    + plantuml.render_trace_conformance(state, m, mm)
+    + plantuml.render_trace_conformance(state, m_rewritten, mm)
+)
+
+print()
+print("PlantUML:", make_plantuml_url(uml))
+

+ 213 - 0
tutorial/05_advanced_transformation.py

@@ -0,0 +1,213 @@
+# In this tutorial, we implement the semantics of Petri Nets by means of model transformation.
+# Compared to the previous tutorial, it only introduces one more feature: pivots.
+# Consider the following Petri Net language meta-model:
+
+mm_cs = """
+    Place:Class
+    Transition:Class
+
+    Place_tokens:AttributeLink (Place -> Integer) {
+        optional = False;
+        name = "tokens";
+        constraint = `get_value(get_target(this)) >= 0`;
+    }
+
+    P2T:Association (Place -> Transition)
+    T2P:Association (Transition -> Place)
+
+    P2T_weight:AttributeLink (P2T -> Integer) {
+        optional = False;
+        name = "weight";
+        constraint = `get_value(get_target(this)) >= 0`;
+    }
+
+    T2P_weight:AttributeLink (T2P -> Integer) {
+        optional = False;
+        name = "weight";
+        constraint = `get_value(get_target(this)) >= 0`;
+    }
+"""
+
+# We now create the following Petri Net:
+#  https://upload.wikimedia.org/wikipedia/commons/4/4d/Two-boundedness-cb.png
+
+m_cs = """
+    p1:Place  { tokens = 0; }
+    p2:Place  { tokens = 0; }
+    cp1:Place { tokens = 2; }
+    cp2:Place { tokens = 2; }
+
+    t1:Transition
+    t2:Transition
+    t3:Transition
+
+    :T2P (t1  -> p1)  { weight = 1; }
+    :P2T (p1  -> t2)  { weight = 1; }
+    :T2P (t2  -> cp1) { weight = 1; }
+    :P2T (cp1 -> t1)  { weight = 1; }
+
+    :T2P (t2  -> p2)  { weight = 1; }
+    :P2T (p2  -> t3)  { weight = 1; }
+    :T2P (t3  -> cp2) { weight = 1; }
+    :P2T (cp2 -> t2)  { weight = 1; }
+"""
+
+# The usual...
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+from transformation.ramify import ramify
+from transformation.matcher import match_od
+from transformation.cloner import clone_od
+from transformation import rewriter
+from concrete_syntax.textual_od.renderer import render_od
+from concrete_syntax.common import indent
+from api.od import ODAPI
+
+state = DevState()
+mmm = bootstrap_scd(state)
+mm = loader.parse_and_check(state, mm_cs, mmm, "mm")
+m = loader.parse_and_check(state, m_cs, mm, "m")
+
+mm_ramified = ramify(state, mm)
+
+
+# We will now implement Petri Net operational semantics by means of model transformation.
+
+
+# Look for any transition:
+
+lhs_transition_cs = """
+    t:RAM_Transition
+"""
+
+# But, if that transition has an incoming arc (P2T) from a place with not enough tokens, the transition cannot fire. We can express this as a pattern:
+
+lhs_transition_disabled_cs = """
+    t:RAM_Transition
+    p:RAM_Place
+    :RAM_P2T (p -> t) {
+        condition = ```
+            place = get_source(this)
+            tokens = get_slot_value(place, "tokens")
+            weight = get_slot_value(this, "weight")
+            tokens < weight # True means: cannot fire
+        ```;
+    }
+"""
+
+# Parse these patterns:
+lhs_transition = loader.parse_and_check(state, lhs_transition_cs, mm_ramified, "lhs_transition")
+lhs_transition_disabled = loader.parse_and_check(state, lhs_transition_disabled_cs, mm_ramified, "lhs_transition_disabled")
+
+# To find enabled transitions, we first match our first pattern (looking for a transition), and then we try to 'grow' that match with our second, "Negative Application Condition" (NAC) pattern. If growing the match with the second pattern is possible, we abort and look for another transition.
+# To grow a match, we use the 'pivot'-argument of the match-function. A pivot is a partial match that needs to be grown.
+# This results in the following generator function:
+
+def find_enabled_transitions(m):
+    for match in match_od(state, m, mm, lhs_transition, mm_ramified):
+        for match_nac in match_od(state, m, mm, lhs_transition_disabled, mm_ramified, pivot=match): # <-- notice the pivot :)
+            # transition is disabled
+            break # find next transition
+        else:
+            # we've found an enabled transition:
+            yield match
+
+# Let's see if it works:
+
+enabled = list(find_enabled_transitions(m))
+print("enabled PN transitions:", enabled)
+
+
+# Next, to fire a transition:
+#  - we decrement the number of tokens of every incoming place
+#  - we increment the number of tokens of every outgoing place
+# We do this also by growing our match: given an enabled transition (already matched), we match for *any* incoming place, and rewrite that place to reduce its tokens. Next, we look for *any* outgoing place, and increment its tokens.
+
+# Decrement incoming
+lhs_incoming_cs = """
+    t:RAM_Transition # <-- we already know this transition is enabled
+    inplace:RAM_Place {
+        RAM_tokens = `True`; # this needs to be here, otherwise, the rewriter will try to create a new attribute rather than update the existing one
+    }
+    inarc:RAM_P2T (inplace -> t)
+"""
+rhs_incoming_cs = """
+    t:RAM_Transition
+    inplace:RAM_Place {
+        RAM_tokens = ```
+            weight = get_slot_value(matched("inarc"), "weight")
+            print("adding", weight, "tokens to", get_name(this))
+            get_value(this) - weight
+        ```;
+    }
+    inarc:RAM_P2T (inplace -> t)
+"""
+
+# Increment outgoing
+lhs_outgoing_cs = """
+    t:RAM_Transition
+    outplace:RAM_Place {
+        RAM_tokens = `True`; # this needs to be here, otherwise, the rewriter will try to create a new attribute rather than update the existing one
+    }
+    outarc:RAM_T2P (t -> outplace)
+"""
+rhs_outgoing_cs = """
+    t:RAM_Transition
+    outplace:RAM_Place {
+        RAM_tokens = ```
+            weight = get_slot_value(matched("outarc"), "weight")
+            print("adding", weight, "tokens to", get_name(this))
+            get_value(this) + weight
+        ```;
+    }
+    outarc:RAM_T2P (t -> outplace)
+"""
+
+# Parse all the patterns
+lhs_incoming = loader.parse_and_check(state, lhs_incoming_cs, mm_ramified, "lhs_incoming")
+rhs_incoming = loader.parse_and_check(state, rhs_incoming_cs, mm_ramified, "rhs_incoming")
+lhs_outgoing = loader.parse_and_check(state, lhs_outgoing_cs, mm_ramified, "lhs_outgoing")
+rhs_outgoing = loader.parse_and_check(state, rhs_outgoing_cs, mm_ramified, "rhs_outgoing")
+
+# Firing is really simple:
+def fire_transition(m, transition_match):
+    for match_incoming in match_od(state, m, mm, lhs_incoming, mm_ramified, pivot=transition_match):
+        rewriter.rewrite(state, rhs_incoming, mm_ramified, match_incoming, m, mm)
+    for match_outgoing in match_od(state, m, mm, lhs_outgoing, mm_ramified, pivot=transition_match):
+        rewriter.rewrite(state, rhs_outgoing, mm_ramified, match_outgoing, m, mm)
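+
+# Note that fire_transition returns nothing: the effect of rewriter.rewrite is
+# visible in the host model m itself, which is why the loop below simply recomputes
+# the enabled transitions on m after every firing.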
+
+def show_petri_net(m):
+    odapi = ODAPI(state, m, mm)
+    p1 = odapi.get_slot_value(odapi.get("p1"), "tokens")
+    p2 = odapi.get_slot_value(odapi.get("p2"), "tokens")
+    cp1 = odapi.get_slot_value(odapi.get("cp1"), "tokens")
+    cp2 = odapi.get_slot_value(odapi.get("cp2"), "tokens")
+    return f"""
+     t1                   t2                   t3  
+     ┌─┐        p1        ┌─┐        p2        ┌─┐ 
+     │ │        ---       │ │        ---       │ │ 
+     │ ├─────► ( {p1} )─────►│ │─────► ( {p2} )─────►│ │ 
+     └─┘        ---       └─┘        ---       └─┘ 
+      ▲                   │ ▲                   │  
+      │                   │ │                   │  
+      │                   │ │                   │  
+      │                   │ │                   │  
+      │        ---        │ │       ---         │  
+      └───────( {cp1} )◄──────┘ └──────( {cp2} )◄───────┘  
+               ---                  ---            
+               cp1                  cp2            """
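+
+# (show_petri_net hard-codes the layout of the example model: places p1, p2, cp1,
+# cp2 and transitions t1, t2, t3, connected as drawn above.)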
+
+# Let's see if it works:
+while len(enabled) > 0:
+    print(show_petri_net(m))
+    print("\nenabled PN transitions:", enabled)
+    to_fire = enabled[0]['t']
+    print("press ENTER to fire", to_fire)
+    input()
+    print("firing transition:", to_fire)
+    fire_transition(m, enabled[0])
+    enabled = list(find_enabled_transitions(m))
+
+# That's it!
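+
+# A possible variation (just a sketch, not used above): fire the transitions
+# automatically, picking a random enabled transition each step instead of waiting
+# for ENTER:
+#
+#   import random
+#   enabled = list(find_enabled_transitions(m))
+#   while enabled:
+#       chosen = random.choice(enabled)
+#       print("firing transition:", chosen['t'])
+#       fire_transition(m, chosen)
+#       enabled = list(find_enabled_transitions(m))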

+ 3 - 0
util/loader.py

@@ -39,8 +39,11 @@ KINDS = ["nac", "lhs", "rhs"]
 # Phony name generator that raises an error if you try to use it :)
 class LHSNameGenerator:
     def __call__(self, type_name):
+        if type_name == "GlobalCondition":
+            return parser.DefaultNameGenerator()(type_name)
         raise Exception(f"Error: Object or link of type '{type_name}' does not have a name.\nAnonymous objects/links are not allowed in the LHS of a rule, because they can have unintended consequences. Please give all of the elements in the LHS explicit names.")
 
+
 # load model transformation rules
 def load_rules(state, get_filename, rt_mm_ramified, rule_names, check_conformance=True):
     rules = {}

+ 3 - 1
util/simulator.py

@@ -27,6 +27,8 @@ class RandomDecisionMaker(DecisionMaker):
 
     def __call__(self, actions):
         arr = [action for descr, action in actions]
+        if len(arr) == 0:
+            return
         i = math.floor(self.r.random()*len(arr))
         return arr[i]
 
@@ -91,7 +93,7 @@ class MinimalSimulator:
         self._print("Start simulation")
         self._print(f"Decision maker: {self.decision_maker}")
         step_counter = 0
-        while True:
+        while step_counter < 10:
             termination_reason = self.termination_condition(model)
             if termination_reason != None:
                 self._print(f"Termination condition satisfied.\nReason: {termination_reason}.")