Browse Source

Added all files from AToMPM

rparedis 2 years ago
commit
af6c56b088

+ 0 - 0
__init__.py


+ 0 - 0
core/__init__.py


+ 564 - 0
core/himesis.py

@@ -0,0 +1,564 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+import uuid, copy, igraph as ig
+
+
class HConstants:
    """
    Attribute keys reserved for Himesis internals.

    Every key begins with '$' so these internal attributes can never clash
    with user-defined attributes, which are prohibited from starting
    with '$'.
    """
    GUID = '$GUID__'
    METAMODELS = '$mms__'
    MISSING_METAMODELS = '$missing_mms__'
    FULLTYPE = '$ft__'
    CONNECTOR_TYPE = '$ct__'
    MT_LABEL = '$MT_label__'
    MT_CONSTRAINT = '$MT_constraint__'
    MT_ACTION = '$MT_action__'
    MT_SUBTYPE_MATCH = '$MT_subtypeMatching__'
    MT_SUBTYPES = '$MT_subtypes__'
    MT_DIRTY = '$MT_dirty__'
    MT_PIVOT_IN = '$MT_pivotIn__'
    MT_PIVOT_OUT = '$MT_pivotOut__'
+
+
+
class Himesis(ig.Graph):
    """
    A typed, attributed, directed multi-graph built on top of igraph.

    Every node carries a globally unique identifier (GUID) plus metamodel
    typing information, all stored under the '$'-prefixed HConstants keys
    so they never collide with user attributes.

    @param name: the name of this graph
    @param num_nodes: the total number of nodes. If not known, you can add more vertices later
    @param edges: the list of edges where each edge is a tuple representing the ids of the source and target nodes
    """
    Constants = HConstants
    EDGE_LIST_THRESHOLD = 10**3


    @staticmethod
    def is_RAM_attribute(attr_name):
        # User-level ("RAM") attributes are exactly those that do not start
        # with '$'; '$'-prefixed names are reserved for Himesis internals.
        return not attr_name.startswith('$')

    def __init__(self, name='', num_nodes=0, edges=None):
        """
            Creates a typed, attributed, directed, multi-graph.
            @param name: the name of this graph
            @param num_nodes: the total number of nodes. If not known, you can add more vertices later
            @param edges: the list of edges where each edge is a tuple representing the ids of the source and target nodes
        """
        # BUGFIX: default was a shared mutable list (edges=[]); use None as
        # the sentinel and substitute a fresh list per call.
        ig.Graph.__init__(self, directed=True, n=num_nodes,
                          edges=(edges if edges is not None else []))
        if not name:
            name = self.__class__.__name__
        self.name = name

        # mmTypeData: enables add_node() to properly initialize to-be nodes s.t. they reflect the default values specified by their metamodels
        # _guid2index: a fast lookup of the node's index by its guid
        # session: area which provides a clean and efficient way to remember information across rules
        self.mmTypeData = {}
        self._guid2index = {}
        self.session = {}

    def copy(self):
        """
            Returns an independent copy of this graph: igraph copies the
            topology and attributes, and the Himesis bookkeeping dicts are
            deep-copied so mutations cannot leak between the two graphs.
        """
        cpy = ig.Graph.copy(self)
        cpy._guid2index = copy.deepcopy(self._guid2index)
        ''' hergin :: motif-integration FIX for mmTypeData bug '''
        cpy.mmTypeData = copy.deepcopy(self.mmTypeData)
        cpy.session = copy.deepcopy(self.session)

        cpy.name = copy.deepcopy(self.name)
        return cpy

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo):
        # copy() already deep-copies all mutable Himesis state.
        return self.__copy__()

    def __str__(self):
        s = super(Himesis, self).__str__()
        return self.name + ' ' + s[s.index('('):] + ' ' + str(self[Himesis.Constants.GUID])

    def get_id(self):
        """
            Returns the unique identifier of the graph
        """
        return self[Himesis.Constants.GUID]

    def node_iter(self):
        """
            Iterates over the nodes in the graph, by index
        """
        return range(self.vcount())

    def edge_iter(self):
        """
            Iterates over the edges in the graph, by index
        """
        return range(self.ecount())

    def add_node(self, fulltype=None, isConnector=None, newNodeGuid=None):
        """
            Appends a node to the graph and returns its index.
            @param fulltype: the node's fully qualified metamodel type
            @param isConnector: whether the node represents a connector
            @param newNodeGuid: explicit GUID for the node; a fresh uuid4 is
                                generated when omitted
        """
        newNodeIndex = self.vcount()
        if newNodeGuid is None:
            newNodeGuid = uuid.uuid4()
        self.add_vertices(1)
        self.vs[newNodeIndex][Himesis.Constants.GUID] = newNodeGuid
        self.vs[newNodeIndex][Himesis.Constants.FULLTYPE] = fulltype
        self.vs[newNodeIndex][Himesis.Constants.CONNECTOR_TYPE] = isConnector
        # Initialize attributes with the default values declared by the
        # node's metamodel type, when that type is known.
        if fulltype in self.mmTypeData:
            for attr, val in self.mmTypeData[fulltype].items():
                self.vs[newNodeIndex][str(attr)] = val
        self._guid2index[newNodeGuid] = newNodeIndex
        return newNodeIndex

    def delete_nodes(self, nodes):
        """
            Deletes the given nodes (by index) together with their adjacent
            edges, then rebuilds the guid lookup.
        """
        self.delete_vertices(nodes)
        # Regenerate the lookup because node indices have changed
        self._guid2index = dict((self.vs[node][Himesis.Constants.GUID], node) for node in self.node_iter())

    def get_node(self, guid):
        """
            Retrieves the node instance with the specified guid
            @param guid: The guid of the node.
        """
        if guid in self._guid2index:
            # The cached index may be stale if vertices were removed without
            # going through delete_nodes(); detect that and rebuild the map.
            if self._guid2index[guid] >= self.vcount() or \
                    self.vs[self._guid2index[guid]][Himesis.Constants.GUID] != guid:
                self._guid2index = dict((self.vs[node][Himesis.Constants.GUID], node) for node in self.node_iter())
            try:
                return self._guid2index[guid]
            except KeyError:
                #TODO: This should be a TransformationLanguageSpecificException
                raise KeyError('Invalid node id. Make sure to only delete nodes via Himesis.delete_nodes(): ' + str(guid))
        else:
            #TODO: This should be a TransformationLanguageSpecificException
            raise KeyError('Node not found with specified id. Make sure to only create nodes via Himesis.add_node(): ' + str(guid))

    def draw(self, visual_style=None, label=None, show_guid=False, show_id=False, debug=False, width=600, height=900):
        """
        Visual graphic rendering of the graph.
        @param label: The attribute to use as node label in the figure.
                      If not provided, the index of the node is used.
        @param visual_style: More drawing options
        (see http://igraph.sourceforge.net/doc/python/igraph.Graph-class.html#__plot__ for more details).
        """
        # BUGFIX: visual_style defaulted to a shared mutable {} and this
        # method writes into it; give each call its own dict instead.
        if visual_style is None:
            visual_style = {}
        if 'layout' not in visual_style:
            visual_style["layout"] = 'fr'
        if 'margin' not in visual_style:
            visual_style["margin"] = 10

        # Set the labels
        if not label:
            if show_guid:
                visual_style["vertex_label"] = [str(self.vs[i][Himesis.Constants.GUID])[:4] for i in self.node_iter()]
            elif show_id:
                visual_style["vertex_label"] = [str(i) for i in self.node_iter()]
            else:
                visual_style["vertex_label"] = [''] * self.vcount()
        else:
            try:
                visual_style["vertex_label"] = self.vs[label]
                # Fall back to the node's full type when a node has no value
                # for the requested label attribute.
                for n in self.node_iter():
                    if not visual_style["vertex_label"][n]:
                        visual_style["vertex_label"][n] = self.vs[n][Himesis.Constants.FULLTYPE]
                        if debug:
                            visual_style["vertex_label"][n] = str(n) + ':' + visual_style["vertex_label"][n]
                    elif debug:
                        visual_style["vertex_label"][n] = str(n) + ':' + visual_style["vertex_label"][n]
            # BUGFIX: was a bare 'except:' which also swallowed SystemExit
            # and KeyboardInterrupt.
            except Exception:
                raise Exception('%s is not a valid attribute' % label)

        return ig.plot(self, bbox=(0, 0, width, height), **visual_style)

    def execute(self, *args):
        # Overridden by post-condition patterns; plain graphs cannot be run.
        raise AttributeError('This method is not implemented')
+
+
class HimesisPattern(Himesis):
    """
    A Himesis graph used as a rule pattern: nodes carry pattern labels
    (MT_LABEL) and optionally output-pivot bindings (MT_PIVOT_OUT).
    """
    def __init__(self, name='', num_nodes=0, edges=None):
        # BUGFIX: default was a shared mutable list (edges=[]).
        super(HimesisPattern, self).__init__(name, num_nodes,
                                             edges if edges is not None else [])
        # Lazily built caches: pattern label -> node index, and
        # pivot-out name -> node index.
        self.nodes_label = {}
        self.nodes_pivot_out = {}

    def get_node_with_label(self, label):
        """
            Retrieves the index of the node with the specified label.
            Returns None if no node carries that label.
            @param label: The label of the node.
        """
        if not self.nodes_label:
            self.nodes_label = dict([(self.vs[i][Himesis.Constants.MT_LABEL], i) for i in self.node_iter()])
        if label in self.nodes_label:
            return self.nodes_label[label]

    def get_pivot_out(self, pivot):
        """
            Retrieves the index of the pivot node.
            Returns None if the pivot is not bound in this pattern.
            @param pivot: The label of the pivot.
        """
        if not self.nodes_pivot_out and Himesis.Constants.MT_PIVOT_OUT in self.vs.attribute_names():
            # BUGFIX: the cache previously mapped {node index: pivot name},
            # so looking it up by pivot name below could never succeed.
            # Build {pivot name: node index}, mirroring get_pivot_in().
            self.nodes_pivot_out = dict([(self.vs[i][Himesis.Constants.MT_PIVOT_OUT], i) for i in self.node_iter()])
        if pivot in self.nodes_pivot_out:
            return self.nodes_pivot_out[pivot]
+
+
class HimesisPreConditionPattern(HimesisPattern):
    """
    Base class for pre-condition (LHS/NAC) patterns; adds input-pivot
    lookups (MT_PIVOT_IN) and the overridable match constraint.
    """
    def __init__(self, name='', num_nodes=0, edges=None):
        # BUGFIX: default was a shared mutable list (edges=[]).
        super(HimesisPreConditionPattern, self).__init__(name, num_nodes,
                                                         edges if edges is not None else [])
        # Lazily built cache: pivot-in name -> node index.
        self.nodes_pivot_in = {}

    def get_pivot_in(self, pivot):
        """
            Retrieves the index of the pivot node.
            Returns None if the pivot is not bound in this pattern.
            @param pivot: The label of the pivot.
        """
        if not self.nodes_pivot_in and Himesis.Constants.MT_PIVOT_IN in self.vs.attribute_names():
            self.nodes_pivot_in = dict([(self.vs[i][Himesis.Constants.MT_PIVOT_IN], i) for i in self.node_iter()])
        if pivot in self.nodes_pivot_in:
            return self.nodes_pivot_in[pivot]

    def constraint(self, mtLabel2graphIndexMap, graph):
        """
            If a constraint shall be specified, the corresponding Himesis graph must override this method.
            The condition must be specified in the pattern graph and not the input graph.
            By default, the constraint evaluates to True.
            @param mtLabel2graphIndexMap: the current match (pattern label -> input-graph node index), before the rewriting.
            @param graph: The whole input graph.
        """
        raise NotImplementedError('Use graph[Himesis.Constants.MT_CONSTRAINT]() instead')
+
+
class HimesisPreConditionPatternLHS(HimesisPreConditionPattern):
    """
    The left-hand side of a rule. Owns the rule's NACs, partitioned into
    'unbound' NACs (empty bridge with the LHS) and 'bound' NACs (non-empty
    bridge), stored as unbound + bound in self.NACs.
    """
    def __init__(self, name='', num_nodes=0, edges=None):
        # BUGFIX: default was a shared mutable list (edges=[]).
        super(HimesisPreConditionPatternLHS, self).__init__(name, num_nodes,
                                                            edges if edges is not None else [])
        self.NACs = []
        self.bound_start_index = 0  # index of first bound NAC in NACs list

    def addNAC(self, nac):
        """
            Appends the NAC to this LHS pattern
        """
        if nac.LHS != self:
            nac.LHS = self
        # NOTE(review): assumes nac.bridge is initialized elsewhere (e.g. by
        # generated pattern code); HimesisPreConditionPatternNAC.__init__
        # does not set it — confirm.
        if nac.bridge is None:
            nac.bridge = nac.compute_bridge()
        self.NACs.append(nac)

    def addNACs(self, NACs):
        """
            Stores the list of NACs in decreasing order of their size
            @param NACs: list of NACs
            @postcondition: the NACs will be stored in decreasing order of their bridge sizes
        """
        bound = []
        unbound = []
        for nac in NACs:
            nac.LHS = self
            nac.bridge_size = nac.compute_bridge().vcount()
            if nac.bridge_size > 0:
                bound.append(nac)
            else:
                unbound.append(nac)
        # Largest bridges (then largest NACs) first within each partition.
        bound.sort(key=lambda nac: (nac.bridge_size, nac.vcount()), reverse=True)
        unbound.sort(key=lambda nac: nac.vcount(), reverse=True)
        self.NACs = unbound + bound
        self.bound_start_index = len(unbound)

    def getUnboundNACs(self):
        """Returns the NACs whose bridge with the LHS is empty."""
        return self.NACs[:self.bound_start_index]

    def getBoundNACs(self):
        """Returns the NACs whose bridge with the LHS is non-empty."""
        return self.NACs[self.bound_start_index:]

    def hasBoundNACs(self):
        """True iff at least one NAC shares nodes with the LHS."""
        return self.bound_start_index < len(self.NACs)
+
+
class HimesisPreConditionPatternNAC(HimesisPreConditionPattern):
    """
    A negative application condition (NAC) of a rule. A NAC is matched
    relative to its LHS via the 'bridge': the subgraph the two patterns
    share (by pattern label).
    """
    def __init__(self, LHS=None, name='', num_nodes=0, edges=None):
        # BUGFIX: default was a shared mutable list (edges=[]).
        super(HimesisPreConditionPatternNAC, self).__init__(name, num_nodes,
                                                            edges if edges is not None else [])
        self.LHS = LHS           # the LHS pattern this NAC constrains
        self.bridge_size = 0     # node count of the bridge, see set_bridge_size()

    def set_bridge_size(self):
        """
            Computes the bridge and stores the number of its nodes.
        """
        if self.LHS is None:
            raise Exception('Missing LHS to compute bridge')
        self.bridge_size = self.compute_bridge().vcount()

    def compute_bridge(self):
        """
            Creates a HimesisPreConditionPattern defined as the intersection of graph with this instance.
            This is called the 'bridge'.
            From a topological point of view, this method computes the largest common subgraph of these two graphs.
            However, the similarity of nodes of the bridge relies on the meta-model type of the nodes.
            Furthermore, every attribute value is the conjunction of the constraints defined in each graph.
        """
        # G1 is the smallest graph and G2 is the bigger graph
        G1 = self
        G2 = self.LHS
        if G1.vcount() > G2.vcount():
            # Swap
            G1, G2 = G2, G1
        # The bridge
        G = HimesisPreConditionPattern()
        G[Himesis.Constants.GUID] = uuid.uuid4()

        # We don't need to actually solve the largest common subgraph (LCS) problem
        # because we assume that the nodes are labelled uniquely in each graph
        # and that if a label is in G1 and in G2, then it will be in G
        if len(G1.vs) == 0:
            return G

        Labels2 = G2.vs[Himesis.Constants.MT_LABEL]
        for label in G1.vs[Himesis.Constants.MT_LABEL]:
            if label in Labels2:
                # Get the corresponding node from G1
                v1 = G1.vs.select(lambda v: v[Himesis.Constants.MT_LABEL] == label)
                if len(v1) == 1:
                    v1 = v1[0]
                elif len(v1) == 0:
                    # Unreachable: 'label' was taken from G1's own labels.
                    raise Exception('Label does not exist :: ' + str(label))
                else:
                    raise Exception('Label is not unique :: ' + str(label))
                # Get the corresponding node from G2
                v2 = G2.vs.select(lambda v: v[Himesis.Constants.MT_LABEL] == label)
                if len(v2) == 1:
                    v2 = v2[0]
                elif len(v2) == 0:
                    # Unreachable: membership in Labels2 was checked above.
                    raise Exception('Label does not exist :: ' + str(label))
                else:
                    raise Exception('Label is not unique :: ' + str(label))
                newNodeIndex = G.add_node()
                # Now do a conjunction of the attributes: start from v1's
                # attributes, then merge in v2's.
                for attr in v1.attribute_names():
                    G.vs[newNodeIndex][attr] = v1[attr]
                for attr in v2.attribute_names():
                    # The attribute is not in v1
                    if attr not in G.vs[newNodeIndex].attribute_names():
                        G.vs[newNodeIndex][attr] = v2[attr]
                    # Give this node its own GUID attribute
                    elif attr == Himesis.Constants.GUID:
                        G.vs[newNodeIndex][Himesis.Constants.GUID] = uuid.uuid4()
                        continue
                    # Ignore non-RAM attributes ('special' and HConstants attributes)
                    elif not Himesis.is_RAM_attribute(attr):
                        continue
                    # Handle normal attribute
                    else:
                        if not v2[attr]:
                            # There is no constraint for this attribute
                            continue

                        # The attribute constraint code is the conjunction of the LHS constraint
                        # with the NAC constraint for this attribute.
                        # (Closure factory binds attr/v1/v2 by value to avoid
                        # the late-binding pitfall.)
                        def get_evalAttrConditions(_attr, _v1, _v2):
                            def evalAttrConditions(mtLabel2graphIndexMap, graph):
                                return G1.vs[_v1][_attr](mtLabel2graphIndexMap, graph) and \
                                       G2.vs[_v2][_attr](mtLabel2graphIndexMap, graph)
                            return evalAttrConditions
                        G.vs[newNodeIndex][attr] = get_evalAttrConditions(attr, v1.index, v2.index)
                    #else: v1[attr] == v2[attr], so we don't need to do anything more
        # Now add the edges
        # We only need to go through the edges of the smaller graph
        for e in G1.edge_iter():
            src_label = G1.vs[G1.es[e].source][Himesis.Constants.MT_LABEL]
            tgt_label = G1.vs[G1.es[e].target][Himesis.Constants.MT_LABEL]
            src = G.vs.select(lambda v: v[Himesis.Constants.MT_LABEL] == src_label)
            tgt = G.vs.select(lambda v: v[Himesis.Constants.MT_LABEL] == tgt_label)
            if len(src) == len(tgt) == 1:
                src = src[0]
                tgt = tgt[0]
                G.add_edges([(src.index, tgt.index)])
            elif len(src) == 0:
                # Endpoint not in the bridge: the edge is not shared.
                #                raise Exception('Label does not exist :: '+str(src_label))
                pass
            elif len(tgt) == 0:
                #                raise Exception('Label does not exist :: '+str(tgt_label))
                pass
            elif len(src) > 1:
                raise Exception('Label is not unique :: ' + str(src_label))
            elif len(tgt) > 1:
                raise Exception('Label is not unique :: ' + str(tgt_label))
        return G
+
+
+
class HimesisPostConditionPattern(HimesisPattern):
    """
    The right-hand side (post-condition) of a rule. execute() rewrites a
    host graph in place according to a match of the pre-condition pattern,
    logging every change as a delta operation.
    """
    def __init__(self, name='', num_nodes=0, edges=None):
        # BUGFIX: default was a shared mutable list (edges=[]).
        super(HimesisPostConditionPattern, self).__init__(name, num_nodes,
                                                          edges if edges is not None else [])
        self.pre = None  # the corresponding pre-condition (LHS) pattern

    def action(self, mtLabel2graphIndexMap, graph):
        """
        If an action shall be specified, the corresponding Himesis graph must override this method.
        The action must be specified in the pattern graph and not the input graph.
        """
        raise NotImplementedError('Use graph[Himesis.Constants.MT_ACTION]() instead')

    # This method implements the rewriting part of the rule.
    '''
        NOTE
            certain rule applications may have side-effects that aren't caused by
            the rewriting per se... at present, the only instance of this is when a
            rule produces entities of a formalism not loaded on the asworker... in
            this case, we prepend appropriate {'op':'LOADMM','name':...} entries to
            packet.deltas


        NOTE
            when creating new nodes, information about the match is bundled so that
            the said new nodes' icons get created near the icons of nodes matched
            in the LHS


        NOTE
            deletes must be performed last because they alter igraph indices and
            which we use to map __pLabels to source graph nodes... however, to
            avoid violating maximum association multiplicities, deletes in the
            source model must be performed first... thus, RM* operations, if any,
            are placed at the start of packet.deltas
    '''
    def execute(self, packet, match):
        """
        Applies this RHS to packet.graph given 'match' (a dict mapping
        pattern labels to host-graph node indices). Changes are recorded
        as delta dicts in packet.deltas.
        """
        graph = packet.graph

        # Changes to packet.graph are logged in packet.deltas
        packet.deltas = []

        # Init deltas with rule side-effects (see NOTE)
        for mm in self[Himesis.Constants.MISSING_METAMODELS]():
            packet.deltas.append({'op': 'LOADMM', 'name': mm})


        # Set the attributes of graph.vs[graphNodeIndex] to match those of self.vs[rhsNodeIndex]
        def set_attributes(rhsNodeIndex, graphNodeIndex, newNode, pLabel2graphIndexMap):
            changedSomething = False
            for attrName in self.vs[rhsNodeIndex].attribute_names():
                if Himesis.is_RAM_attribute(attrName):
                    attrVal = self.vs[rhsNodeIndex][attrName]
                    if attrVal is None:
                        # Not 'really' an attribute
                        continue
                    oldVal = None
                    try:
                        if not newNode:
                            oldVal = graph.vs[graphNodeIndex][attrName]

                        # RHS attribute values are callables computing the new value.
                        newVal = self.vs[rhsNodeIndex][attrName](pLabel2graphIndexMap, graph)
                        if oldVal != newVal:
                            graph.vs[graphNodeIndex][attrName] = newVal
                            packet.deltas.append(
                                {'op': 'CHATTR',
                                 'guid': graph.vs[graphNodeIndex][Himesis.Constants.GUID],
                                 'attr': attrName,
                                 'old_val': oldVal,
                                 'new_val': newVal})
                            changedSomething = True
                    except Exception as e:
                        raise Exception("An error has occurred while computing the value of the attribute '%s' :: %s" % (attrName, e))
            return changedSomething

        # Build a dictionary {label: node index} mapping each label of the pattern to a node in the graph to rewrite.
        # Because of the uniqueness property of labels in a rule, we can store all LHS labels
        # and subsequently add the labels corresponding to the nodes to be created.
        labels = match.copy()

        # Update attribute values
        # NOTE(review): self.pre_labels is expected to be set by generated
        # pattern subclasses — confirm.
        LHS_labels = self.pre_labels
        for label in LHS_labels:
            rhsNodeIndex = self.get_node_with_label(label)
            if rhsNodeIndex is None:
                continue        # not in the interface graph (LHS n RHS)
            if set_attributes(rhsNodeIndex, labels[label], False, labels):
                graph.vs[labels[label]][Himesis.Constants.MT_DIRTY] = True

        # Create new nodes (non-connectors first)
        if self.vcount() == 0:
            RHS_labels = []
        else:
            RHS_labels = self.vs[Himesis.Constants.MT_LABEL]
            # sort non-connectors first
            RHS_labels.sort(key=lambda x: self.vs[self.get_node_with_label(x)][Himesis.Constants.CONNECTOR_TYPE] or False)
            # Bundle the matched nodes' attributes so new icons are placed
            # near the matched ones (see NOTE).
            neighborhood = [graph.vs[labels[l]].attributes() for l in LHS_labels]

        new_labels = []
        for label in RHS_labels:
            rhsNodeIndex = self.get_node_with_label(label)
            if label not in LHS_labels:
                new_labels += [label]
                newNodeIndex = graph.add_node(
                    self.vs[rhsNodeIndex][Himesis.Constants.FULLTYPE],
                    self.vs[rhsNodeIndex][Himesis.Constants.CONNECTOR_TYPE])
                packet.deltas.append(
                    {'op': 'MKNODE',
                     'neighborhood': neighborhood,
                     'guid': graph.vs[newNodeIndex][Himesis.Constants.GUID]})
                labels[label] = newNodeIndex
                set_attributes(rhsNodeIndex, newNodeIndex, True, labels)

        # Link new nodes (Create new edges)
        visited_edges = []
        for label in sorted(new_labels):
            for edge in self.es.select(lambda e: (e.index not in visited_edges and
                                                  (label == self.vs[e.source][Himesis.Constants.MT_LABEL] or
                                                   label == self.vs[e.target][Himesis.Constants.MT_LABEL]))):
                src_label = self.vs[edge.source][Himesis.Constants.MT_LABEL]
                tgt_label = self.vs[edge.target][Himesis.Constants.MT_LABEL]
                graph.add_edges([(labels[src_label], labels[tgt_label])])
                packet.deltas.append(
                    {'op': 'MKEDGE',
                     'guid1': graph.vs[labels[src_label]][Himesis.Constants.GUID],
                     'guid2': graph.vs[labels[tgt_label]][Himesis.Constants.GUID]})
                visited_edges.append(edge.index)

        # Set the output pivots
        if Himesis.Constants.MT_PIVOT_OUT in self.vs.attribute_names():
            for node in self.vs.select(lambda v: v[Himesis.Constants.MT_PIVOT_OUT]):
                node = node.index
                label = self.vs[node][Himesis.Constants.MT_LABEL]
                pivot_out = self.vs[node][Himesis.Constants.MT_PIVOT_OUT]
                packet.global_pivots[pivot_out] = graph.vs[labels[label]][Himesis.Constants.GUID]

        # Perform the post-action
        try:
            packet.deltas.extend(self[Himesis.Constants.MT_ACTION](labels, graph))
        except Exception as e:
            raise Exception('An error has occurred while applying the post-action', e)

        # Delete nodes (automatically deletes adjacent edges)
        labels_to_delete = []
        rmnodes = []
        rmedges = []
        for label in LHS_labels:
            if label not in RHS_labels:
                labels_to_delete.append(labels[label])
                rmnodes.append({'op': 'RMNODE', 'attrs': graph.vs[labels[label]].attributes()})
                for edge in graph.es.select(lambda e: (labels[label] == e.source or labels[label] == e.target)):
                    # Only record each edge removal once.
                    found = False
                    for rmedge in rmedges:
                        if rmedge['guid1'] == graph.vs[edge.source][Himesis.Constants.GUID] and \
                                rmedge['guid2'] == graph.vs[edge.target][Himesis.Constants.GUID]:
                            found = True
                            break
                    if not found:
                        rmedges.append({'op': 'RMEDGE',
                                        'guid1': graph.vs[edge.source][Himesis.Constants.GUID],
                                        'guid2': graph.vs[edge.target][Himesis.Constants.GUID]})
        if len(labels_to_delete) > 0:
            # RM* operations go first (see NOTE about multiplicities).
            packet.deltas = rmedges + rmnodes + packet.deltas
            graph.delete_nodes(labels_to_delete)

            ''' hergin :: motif-integration start :: remove the deleted nodes from pivots list '''
            # BUGFIX: the original deleted entries from packet.global_pivots
            # while iterating over it (a RuntimeError in Python 3) and
            # shadowed the module-level 'uuid' import with its loop variable.
            # Iterate over a snapshot of the keys instead.
            deleted_guids = set(rmnode['attrs'][Himesis.Constants.GUID] for rmnode in rmnodes)
            for pivot in list(packet.global_pivots.keys()):
                if packet.global_pivots[pivot] in deleted_guids:
                    del packet.global_pivots[pivot]
            ''' hergin :: motif-integration end '''

+ 686 - 0
core/match_algo.py

@@ -0,0 +1,686 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+import sys
+from .himesis import Himesis
+
+
class Priority(object):
    """
        Heuristic hook for the HimesisMatcher algorithm.

        Decides the order in which candidate pairs are examined. The default
        behaviour follows igraph's node index order; subclass Priority and
        override the order_* methods to plug in a smarter heuristic.
    """
    def __init__(self):
        """
            Create a priority object with no graphs attached yet.
            Both graphs are injected by HimesisMatcher before matching starts.
        """
        self.source_graph = None
        self.pattern_graph = None

    def cache_info(self, source_graph, pattern_graph):
        """
            Hook for pre-computing any data required by the order_* methods.
            @param source_graph: The source graph.
            @param pattern_graph: The pattern graph.
        """
        pass

    def order_source(self, candidate_list):
        """
            Order the terminal-set candidates of the source graph.
            @param candidate_list: The list of possible candidates.
        """
        return sorted(candidate_list)

    def order_pattern(self, candidate_list):
        """
            Order the terminal-set candidates of the pattern graph.
            @param candidate_list: The list of possible candidates.
        """
        return sorted(candidate_list)

    def order_all_source(self, candidate_list):
        """
            Order the complete set of source-graph nodes.
            @param candidate_list: The list of possible candidates.
        """
        return candidate_list

    def order_all_pattern(self, candidate_list):
        """
            Order the complete set of pattern-graph nodes.
            @param candidate_list: The list of possible candidates.
        """
        return candidate_list
+
+
+class HimesisMatcher(object):
+    """
+        Represents a pattern matching algorithm for typed attributed multi-graphs.
+        The pattern matching algorithm is based on VF2.
+    """
+    def __init__(self, source_graph, pattern_graph, priority=Priority(), pred1={}, succ1={}):
+        """
+            Represents a pattern matching algorithm for typed attributed multi-graphs.
+            @param source_graph: The source graph.
+            @param pattern_graph: The pattern graph.
+            @param priority: Instance of a sub-class of the Priority class.
+                            It is used to determine the order in which the candidate pairs should be computed.
+            @param pred1: Pre-built dictionary of predecessors in the source graph.
+            @param succ1: Pre-built dictionary of successors in the source graph.
+        """
+        self.G1 = source_graph
+        self.G2 = pattern_graph
+        self.pred1 = pred1
+        self.succ1 = succ1
+
+        assert(isinstance(priority, Priority))
+        self.priority = priority
+        self.priority.source_graph = source_graph
+        self.priority.pattern_graph = pattern_graph
+
+        # Set recursion limit
+        self.old_recursion_limit = sys.getrecursionlimit()
+        expected_max_recursion_level = self.G2.vcount()
+        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
+            # Give some breathing room
+            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))
+
+        # Initialize the state
+        self.initialize()
+
+        # Check whether we are considering multi-graph
+        #        if reduce(lambda x,y: x or y, self.G2.is_multiple()):
+        #            self.cache_info_multi(self.G1_nodes, self.G2_nodes)
+
+        # Scan the two graphs to cache required information.
+        # Typically stores the results of expensive operation on the graphs.
+        # This speeds up the algorithm significantly.
+        self.cache_info()
+
    def cache_info(self):
        """
            Cache information on the nodes.
            Typically stores the results of expensive operation on the graphs.
            This speeds up the algorithm significantly.
        """
        # Cache individual nodes.
        # NOTE(review): node_iter() is assumed to return a re-iterable sequence
        # of vertex indices — candidate_pairs_iter() iterates these caches on
        # every call, so a one-shot generator here would silently break
        # matching after the first pass; confirm against Himesis.node_iter().
        self.G1_nodes = self.G1.node_iter()
        self.G2_nodes = self.G2.node_iter()

        #        # Memoize the predecessor & successor information:
        #        # for each node store the number of neighbours and the list
        #        if len(self.pred1) == 0 or len(self.succ1) == 0:
        #            self.pred1 = {}
        #            self.succ1 = {}
        #            for node in self.G1_nodes:
        #                self.pred1[node] = (len(self.G1.predecessors(node)), self.G1.predecessors(node))
        #                self.succ1[node] = (len(self.G1.successors(node)), self.G1.successors(node))
        #        self.pred2 = {}
        #        self.succ2 = {}
        #        for node in self.G2_nodes:
        #            self.pred2[node] = (len(self.G2.predecessors(node)), self.G2.predecessors(node))
        #            self.succ2[node] = (len(self.G2.successors(node)), self.G2.successors(node))

        # Cache any further data used for the heuristic prioritization for computing the candidate pair
        # This is done when initializing the priority class
        self.priority.cache_info(self.G1, self.G2)
+
+    def reset_recursion_limit(self):
+        """
+            Restores the recursion limit.
+        """
+        sys.setrecursionlimit(self.old_recursion_limit)
+
+    def initialize(self):
+        """
+            (Re)Initializes the state of the algorithm.
+        """
+        #=======================================================================
+        # The algorithm is based on VF2.
+        # The following are the data-structures used:
+        #    - M_1: the current partial mapping from G1 to G2
+        #    - M_2: the current partial mapping from G2 to G1
+        #    - T1_in: the in-neighbours of the nodes in M_1
+        #    - T2_in: the in-neighbours of the nodes in M_2
+        #    - T1_out: the out-neighbours of the nodes in M_1
+        #    - T2_out: the out-neighbours of the nodes in M_2
+        #=======================================================================
+
+        # core_1[n] contains the index of the node m paired with n, if n is in the mapping
+        self.core_1 = {}   # This is M_1
+        # core_2[m] contains the index of the node n paired with m, if m is in the mapping
+        self.core_2 = {}   # This is M_2
+
+        # The value stored is the depth of the search tree when the node became part of the corresponding set
+        # Non-zero if n is in M_1 or in T_1^{in}
+        self.in_1 = {}
+        # Non-zero if n is in M_1 or in T_1^{out}
+        self.out_1 = {}
+        # Non-zero if m is in M_2 or in T_2^{in}
+        self.in_2 = {}
+        # Non-zero if m is in M_2 or in T_2^{out}
+        self.out_2 = {}
+        # To improve the performance, we also store the following vectors
+        # Non-zero if n is in M_1 or in T_1^{in} or in T_1^{out}
+        self.inout_1 = {}
+        # Non-zero if n is in M_2 or in T_2^{in} or in T_2^{out}
+        self.inout_2 = {}
+
+        # Prepare the necessary data structures required for backtracking
+        self.state = HimesisMatcherState(self)
+
+        # Provide a convenient way to access the isomorphism mapping.
+        self.mapping = self.core_2.copy()
+
+    def are_compatibile(self, src_node, patt_node):
+        """
+            Verifies if a candidate pair is compatible.
+            More specifically, verify degree and meta-model compatibility.
+            @param src_node: The candidate from the source graph.
+            @param patt_node: The candidate from the pattern graph.
+        """
+        sourceNode = self.G1.vs[src_node]
+        patternNode = self.G2.vs[patt_node]
+
+        # First check if they are of the same type
+        if sourceNode[Himesis.Constants.FULLTYPE] == patternNode[Himesis.Constants.FULLTYPE]:
+            # Then check for the degree compatibility
+            return (self.pred2[patt_node][0] <= self.pred1[src_node][0]
+                    and self.succ2[patt_node][0] <= self.succ1[src_node][0])
+        # Otherwise, first check for the degree compatibility
+        elif not (self.pred2[patt_node][0] <= self.pred1[src_node][0]
+                  and self.succ2[patt_node][0] <= self.succ1[src_node][0]):
+            return False
+        # Then check sub-types compatibility
+        else:
+            return (patternNode[Himesis.Constants.MT_SUBTYPE_MATCH]
+                    and sourceNode[Himesis.Constants.FULLTYPE] in patternNode[Himesis.Constants.MT_SUBTYPES])
+
    def candidate_pairs_iter(self):
        """
            Iterator over candidate pairs of nodes in G1 and G2, according to the VF2 algorithm.
            The candidate pairs have all passed the compatibility check before output.
            @return: The candidate pair (source node, pattern node)
        """

        #=======================================================================
        # Here we compute P(s) = (p1,p2) the candidate pair
        # for the current partial mapping M(s).
        #=======================================================================
        # Each branch first scans the pattern-side terminal set for the first
        # node not yet mapped — the scan's loop variable deliberately leaks out
        # of the 'for' — and then pairs it with every unmapped source node.
        # NOTE(review): if the pattern-side scan never hits 'break' (every
        # candidate already in core_2), 'patt_node' keeps the last scanned
        # value and may be paired spuriously; presumably the feasibility
        # checks downstream reject such pairs — confirm before refactoring.

        # First try the nodes that are in both Ti_in and Ti_out
        if len(self.inout_1) > len(self.core_1) and len(self.inout_2) > len(self.core_2):
            for patt_node in self.priority.order_pattern(self.inout_2):
                if patt_node not in self.core_2:
                    break
            for src_node in self.priority.order_source(self.inout_1):
                if src_node not in self.core_1:
                    yield src_node, patt_node

        # If T1_out and T2_out are both non-empty:
        # P(s) = T1_out x {min T2_out}
        elif len(self.out_1) > len(self.core_1) and len(self.out_2) > len(self.core_2):
            for patt_node in self.priority.order_pattern(self.out_2):
                if patt_node not in self.core_2:
                    break
            for src_node in self.priority.order_source(self.out_1):
                if src_node not in self.core_1:
                    yield src_node, patt_node

        # If T1_in and T2_in are both non-empty:
        # P(s) = T1_in x {min T2_in}
        elif len(self.in_1) > len(self.core_1) and len(self.in_2) > len(self.core_2):
            for patt_node in self.priority.order_pattern(self.in_2):
                if patt_node not in self.core_2:
                    break
            for src_node in self.priority.order_source(self.in_1):
                if src_node not in self.core_1:
                    yield src_node, patt_node

        # If all terminal sets are empty:
        # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
        else:
            for patt_node in self.priority.order_all_pattern(self.G2_nodes):
                if patt_node not in self.core_2:
                    break
            for src_node in self.priority.order_all_source(self.G1_nodes):
                if src_node not in self.core_1:
                    yield src_node, patt_node
+
+    def are_syntactically_feasible(self, src_node, patt_node):
+        """
+            Determines whether the two nodes are syntactically feasible,
+            i.e., it ensures that adding this candidate pair does not make it impossible to find a total mapping.
+            @param src_node: The candidate from the source graph.
+            @param patt_node: The candidate from the pattern graph.
+            @return: True if they are syntactically feasible, False otherwise.
+        """
+        #=======================================================================
+        # The syntactic feasibility considers the topology of the two graphs.
+        # It verifies that edges directly or indirectly connected to M(s + P(s))
+        # does not violate the subgraph matching conditions.
+        #=======================================================================
+
+        # Check for self-loops
+        #        e1, e2 = -1, -1
+        #        if patt_node in self.succ2[patt_node] or patt_node in self.pred2[patt_node]:
+        #            if src_node in self.succ1[src_node] or src_node in self.pred1[src_node]:
+        #                e1 = self.G1.get_eid(src_node, src_node)
+        #                e2 = self.G2.get_eid(patt_node, patt_node)
+        #                if self.G1.count_multiple(e1) < self.G2.count_multiple(e2):
+        #                    return False
+        #            else:
+        #                return False
+
+        # Counters for in and out edges found 
+        in1 = 0
+        in2 = 0
+        out1 = 0
+        out2 = 0
+        inout1 = 0
+        inout2 = 0
+
+        # Checks if successors are compatible
+        for successor2 in self.succ2[patt_node][1]:
+            tmp = self.G2.predecessors(successor2)
+            self.pred2[successor2] = (len(tmp), tmp)
+            tmp = self.G2.successors(successor2)
+            self.succ2[successor2] = (len(tmp), tmp)
+            if successor2 not in self.core_2:
+                for successor1 in self.succ1[src_node][1]:
+                    tmp = self.G1.predecessors(successor1)
+                    self.pred1[successor1] = (len(tmp), tmp)
+                    tmp = self.G1.successors(successor1)
+                    self.succ1[successor1] = (len(tmp), tmp)
+                    if (self.succ2[successor2][0] <= self.succ1[successor1][0]
+                            and self.pred2[successor2][0] <= self.pred1[successor1][0]
+                            and successor1 not in self.core_1):
+                        break
+                else:
+                    return False
+                # They are compatible, so update the counters of the pattern node
+                if self.pred2[successor2][1]:
+                    in2 += 1
+                if self.succ2[successor2][1]:
+                    out2 += 1
+                if not self.pred2[successor2][1] and not self.succ2[successor2][1]:
+                    inout2 += 1
+            else:
+                if self.core_2[successor2] not in self.succ1[src_node][1]:
+                    return False
+
+        # Checks if predecessors are compatible
+        for predecessor2 in self.pred2[patt_node][1]:
+            tmp = self.G2.predecessors(predecessor2)
+            self.pred2[predecessor2] = (len(tmp), tmp)
+            tmp = self.G2.successors(predecessor2)
+            self.succ2[predecessor2] = (len(tmp), tmp)
+            if predecessor2 not in self.core_2:
+                for predecessor1 in self.pred1[src_node][1]:
+                    tmp = self.G1.predecessors(predecessor1)
+                    self.pred1[predecessor1] = (len(tmp), tmp)
+                    tmp = self.G1.successors(predecessor1)
+                    self.succ1[predecessor1] = (len(tmp), tmp)
+                    if (self.pred2[predecessor2][0] <= self.pred1[predecessor1][0]
+                            and self.pred2[predecessor2][0] <= self.pred1[predecessor1][0]
+                            and predecessor1 not in self.core_1):
+                        break
+                else:
+                    return False
+                # They are compatible, so update the counters of the pattern node
+                if self.pred2[predecessor2][1]:
+                    in2 += 1
+                if self.pred2[predecessor2][1]:
+                    out2 += 1
+                if not self.pred2[predecessor2][1] and not self.pred2[predecessor2][1]:
+                    inout2 += 1
+            else:
+                if self.core_2[predecessor2] not in self.pred1[src_node][1]:
+                    return False
+
+        # Now compute the counters of the source node
+        for successor1 in self.succ1[src_node][1]:
+            if successor1 not in self.core_1:
+                tmp = self.G1.predecessors(successor1)
+                self.pred1[successor1] = (len(tmp), tmp)
+                tmp = self.G1.successors(successor1)
+                self.succ1[successor1] = (len(tmp), tmp)
+                if self.pred1[successor1][1]:
+                    in1 += 1
+                if self.succ1[successor1][1]:
+                    out1 += 1
+                if not self.pred1[successor1][1] and not self.succ1[successor1][1]:
+                    inout1 += 1
+            # For induced matches
+            #else:
+            #    if self.core_1[successor1] not in self.succ2[patt_node]:
+            #        return False
+
+        # Now compute the counters of the source node
+        for predecessor1 in self.pred1[src_node][1]:
+            if predecessor1 not in self.core_1:
+                tmp = self.G1.predecessors(predecessor1)
+                self.pred1[predecessor1] = (len(tmp), tmp)
+                tmp = self.G1.successors(predecessor1)
+                self.succ1[predecessor1] = (len(tmp), tmp)
+                if self.pred1[predecessor1][1]:
+                    in1 += 1
+                if self.pred1[predecessor1][1]:
+                    out1 += 1
+                if not self.pred1[predecessor1][1] and not self.pred1[predecessor1][1]:
+                    inout1 += 1
+            # For induced matches
+            #else:
+            #    if self.core_1[predecessor1] not in self.pred2[patt_node]:
+            #        return False
+
+        # Finally, verify if all counters satisfy the subgraph matching conditions
+        # For induced matches
+        #return in2 <= in1 and out2 <= out1 and inout2 <= inout1
+        return in2 <= in1 and out2 <= out1 and (in2 + out2 + inout2) <= (in1 + out1 + inout1)
+
+    def are_semantically_feasible(self, src_node, patt_node):
+        """
+            Determines whether the two nodes are syntactically feasible,
+            i.e., it ensures that adding this candidate pair does not make it impossible to find a total mapping.
+            @param src_node: The candidate from the source graph.
+            @param patt_node: The candidate from the pattern graph.
+            @return: True if they are semantically feasible, False otherwise.
+        """
+        #=======================================================================
+        # This feasibility check looks at the data stored in the pair of candidates.
+        # It verifies that all attribute constraints are satisfied.
+        #=======================================================================
+
+        src_node = self.G1.vs[src_node]
+        patt_node = self.G2.vs[patt_node]
+
+        # check the type information
+        exact_type_match = src_node[Himesis.Constants.FULLTYPE] == patt_node[Himesis.Constants.FULLTYPE]
+        sub_type_match = patt_node[Himesis.Constants.MT_SUBTYPE_MATCH] and \
+             src_node[Himesis.Constants.FULLTYPE] in patt_node[Himesis.Constants.MT_SUBTYPES]
+        if not (exact_type_match or sub_type_match):
+            return False
+
+        # Check for attributes value/constraint
+        for attr in patt_node.attribute_names():
+            # Ignore non-RAM attributes 
+            if not Himesis.is_RAM_attribute(attr) :
+                continue
+            # If the attribute does not "in theory" exist
+            # because igraph actually stores all attribute names in all nodes. 
+            elif patt_node[attr] == None:
+                continue
+
+            # Node patt_node has not yet been matched to src_node... however,
+            # patt_node[attr](..) is expecting a mapping of patt_node's mtLabel
+            # to src_node's index in self.G1... so we build this mapping first
+            mtLabel2graphIndexMap = {}
+            mtLabel2graphIndexMap[ patt_node[Himesis.Constants.MT_LABEL] ] = src_node.index
+
+            try:
+                if not patt_node[attr](mtLabel2graphIndexMap,self.G1):
+                    return False
+            except Exception as e:
+                #TODO: This should be a TransformationLanguageSpecificException
+                raise Exception("An error has occurred while checking the constraint of the attribute '%s' :: %s" % (attr, str(e)))
+        return True
+
    def _match(self):
        """
            Extends the pattern matching mapping.
            This method is recursively called to determine if the pattern G2
            can be completely matched on G1.
            @return: The mapping {pattern node index : source node index}
        """
        #=======================================================================
        # It cleans up the class variables after each recursive call.
        # If a match is found, we yield the mapping.
        #=======================================================================

        # Base condition when a complete match is found
        if len(self.core_2) == self.G2.vcount():
            # Save the final mapping, otherwise garbage collection deletes it
            self.mapping = self.core_2.copy()
            yield self.mapping
        else:
            for src_node, patt_node in self.candidate_pairs_iter():

                # Cache the predecessors and successors of the candidate pairs on the fly.
                # NOTE: the caches are wiped at every iteration; the
                # feasibility checks below re-populate them lazily for
                # exactly the neighbours they visit.
                self.pred1, self.succ1, self.pred2, self.succ2 = {}, {}, {}, {}
                self.pred1[src_node] = (len(self.G1.predecessors(src_node)), self.G1.predecessors(src_node))
                self.succ1[src_node] = (len(self.G1.successors(src_node)), self.G1.successors(src_node))
                self.pred2[patt_node] = (len(self.G2.predecessors(patt_node)), self.G2.predecessors(patt_node))
                self.succ2[patt_node] = (len(self.G2.successors(patt_node)), self.G2.successors(patt_node))

                if self.are_compatibile(src_node, patt_node):
                    if self.are_syntactically_feasible(src_node, patt_node):
                        if self.are_semantically_feasible(src_node, patt_node):
                            # Recursive call, adding the feasible state.
                            # Constructing the state object registers the pair
                            # in core_1/core_2 and updates the terminal sets.
                            newstate = self.state.__class__(self, src_node, patt_node)
                            for mapping in self._match():
                                yield mapping

                            # restore data structures (backtrack this pair)
                            newstate.restore()
+
+    def has_match(self, context={}):
+        """
+            Determines if the pattern graph can be matched on the source graph. 
+            @param context: Optional predefined mapping {string:uuid}.
+            @return: True if a match is found, False otherwise.
+        """
+        try:
+            self.match_iter(context).next()
+            return True
+        except StopIteration:
+            return False
+
+    def match_iter(self, context={}):
+        """
+            Iterator over matchings of the pattern graph on the source graph.
+            @param context: Optional predefined mapping {pattern node index: source node index}.
+            @return: The mapping {pattern node index : source node index}.
+        """
+        self.initialize()
+        for p in context:
+            if self.are_semantically_feasible(context[p], p):
+                self.state.__class__(self, context[p], p)
+            else:
+                # Additional constraints on the pivot nodes are not satisfied: no match is possible
+                return
+        for mapping in self._match():
+            yield mapping
+
+
class HimesisMatcherState(object):
    """
        Internal representation of state for the HimesisMatcher class.
        
        This class is used internally by the HimesisMatcher class.  It is used
        only to store state specific data. There will be at most V(pattern graph) of
        these objects in memory at a time, due to the depth-first search
        strategy employed by the VF2 algorithm.

        Constructing an instance with a candidate pair REGISTERS that pair in
        the matcher's core/terminal vectors as a side effect; restore()
        removes exactly the entries added at this depth (backtracking).
    """
    def __init__(self, matcher, src_node=None, patt_node=None):
        """
            Internal representation of state for the HimesisMatcher class.
            @param matcher: The HimesisMatcher object.
            @param src_node: The source node of the candidate pair.
            @param patt_node: The pattern node of the candidate pair.
        """
        self.matcher = matcher

        # Initialize the last stored node pair.
        self.src_node = None
        self.patt_node = None
        # Search-tree depth == number of pairs mapped so far.
        self.depth = len(matcher.core_1)

        if src_node is None or patt_node is None:
            # Then we reset the class variables.
            # This is how HimesisMatcher.initialize() clears all state.
            matcher.core_1 = {}
            matcher.core_2 = {}
            matcher.in_1 = {}
            matcher.in_2 = {}
            matcher.out_1 = {}
            matcher.out_2 = {}
            matcher.inout_1 = {}
            matcher.inout_2 = {}

        # Watch out! src_node == 0 should evaluate to True.
        if src_node is not None and patt_node is not None:
            # Add the node pair to the isomorphism mapping.
            matcher.core_1[src_node] = patt_node
            matcher.core_2[patt_node] = src_node

            # Store the node that was added last.
            self.src_node = src_node
            self.patt_node = patt_node

            # Now we must update the other four vectors.
            # We will add only if it is not in there already!
            self.depth = len(matcher.core_1)

            # First we add the new nodes...
            for vector in (matcher.in_1, matcher.out_1, matcher.inout_1):
                if src_node not in vector:
                    vector[src_node] = self.depth
            for vector in (matcher.in_2, matcher.out_2, matcher.inout_2):
                if patt_node not in vector:
                    vector[patt_node] = self.depth

            # Now we add every other node...

            # Updates for T_1^{in}: unmapped predecessors of mapped source nodes.
            new_nodes_in = []
            for node in matcher.core_1:
                n = [predecessor for predecessor in matcher.G1.predecessors(node)
                     if predecessor not in matcher.core_1 and predecessor not in new_nodes_in]
                new_nodes_in += n
            for node in new_nodes_in:
                if node not in matcher.in_1:
                    matcher.in_1[node] = self.depth

            # Updates for T_1^{out}: unmapped successors of mapped source nodes.
            new_nodes_out = []
            for node in matcher.core_1:
                n = [successor for successor in matcher.G1.successors(node)
                     if successor not in matcher.core_1 and successor not in new_nodes_out]
                new_nodes_out += n
            for node in new_nodes_out:
                if node not in matcher.out_1:
                    matcher.out_1[node] = self.depth

            # Updates for T_1^{inout}: nodes in both T_1^{in} and T_1^{out}.
            for node in set(list(matcher.in_1.keys()) + list(matcher.out_1.keys())):
                if node in matcher.out_1 and node in matcher.in_1 and node not in matcher.inout_1:
                    matcher.inout_1[node] = self.depth

            # Updates for T_2^{in}: same computation on the pattern side.
            new_nodes_in = []
            for node in matcher.core_2:
                n = [predecessor for predecessor in matcher.G2.predecessors(node)
                     if predecessor not in matcher.core_2 and predecessor not in new_nodes_in]
                new_nodes_in += n
            for node in new_nodes_in:
                if node not in matcher.in_2:
                    matcher.in_2[node] = self.depth

            # Updates for T_2^{out}
            new_nodes_out = []
            for node in matcher.core_2:
                n = [successor for successor in matcher.G2.successors(node)
                     if successor not in matcher.core_2 and successor not in new_nodes_out]
                new_nodes_out += n
            for node in new_nodes_out:
                if node not in matcher.out_2:
                    matcher.out_2[node] = self.depth

            # Updates for T_2^{inout}
            for node in set(list(matcher.in_2.keys()) + list(matcher.out_2.keys())):
                if node in matcher.out_2 and node in matcher.in_2 and node not in matcher.inout_2:
                    matcher.inout_2[node] = self.depth

    def restore(self):
        """
            Deletes the HimesisMatcherState object and restores the class variables.
        """

        # First we remove the node that was added from the core vectors.
        # Watch out! src_node == 0 should evaluate to True.
        if self.src_node is not None and self.patt_node is not None:
            del self.matcher.core_1[self.src_node]
            del self.matcher.core_2[self.patt_node]

        # Now we revert the other four vectors.        
        # Thus, we delete all entries which have this depth level.
        # (Entries added at shallower depths are kept untouched.)
        for vector in (self.matcher.in_1, self.matcher.in_2, self.matcher.out_1, self.matcher.out_2, self.matcher.inout_1, self.matcher.inout_2):
            for node in list(vector.keys()):
                if vector[node] == self.depth:
                    del vector[node]
+
+
class VF2(HimesisMatcher):
    """
        The native VF2 algorithm for subgraph isomorphism (delegated to igraph).
    """
    def __init__(self, G1, G2):
        """
            The native VF2 algorithm for subgraph isomorphism.
            @param G1: The bigger graph.
            @param G2: The smaller graph.
        """
        HimesisMatcher.__init__(self, G1, G2)

    def match_iter(self):
        """
            Iterator over mappings of G2 on a subgraph of G1.
            @return: The mapping {pattern node index : source node index}.
        """
        # igraph returns each mapping as a list where entry i is the source
        # node index matched to pattern node index i; expose it as a dict.
        for mapping in self.G1.get_subisomorphisms_vf2(self.G2):
            yield {patt_idx: src_idx for patt_idx, src_idx in enumerate(mapping)}
+
+
class SubgraphIsoMatcher(HimesisMatcher):
    """
        The VF2 algorithm for subgraph isomorphism as implemented in HimesisMatcher.
        Basically this is the same as HimesisMatcher but no node data is taken into consideration. 
    """
    def __init__(self, source_graph, pattern_graph, priority=None):
        """
            The VF2 algorithm for subgraph isomorphism as implemented in HimesisMatcher.
            Basically this is the same as HimesisMatcher but no node data is taken into consideration.
            @param source_graph: The source graph.
            @param pattern_graph: The pattern graph.
            @param priority: Optional Priority instance controlling candidate
                             order; a fresh Priority() is created when omitted.
        """
        # BUGFIX(review): the default used to be a single shared Priority()
        # instance evaluated once at definition time; HimesisMatcher.__init__
        # mutates the priority object, so state leaked across matchers.
        HimesisMatcher.__init__(self, source_graph, pattern_graph,
                                priority if priority is not None else Priority())

    def are_compatibile(self, src_node, patt_node):
        """
            Verifies if a candidate pair is compatible.
            More specifically, verify degree compatibility only.
            @param src_node: The candidate from the source graph.
            @param patt_node: The candidate from the pattern graph.
        """
        return (self.pred2[patt_node][0] <= self.pred1[src_node][0]
                and self.succ2[patt_node][0] <= self.succ1[src_node][0])

    def are_semantically_feasible(self, sourceNode, patternNode):
        """
            Since no data is considered, the graphs have no semantics.
            @param sourceNode: The candidate from the source graph.
            @param patternNode: The candidate from the pattern graph.
            @return: True always.
        """
        return True

+ 0 - 0
rules/__init__.py


+ 81 - 0
rules/arule.py

@@ -0,0 +1,81 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..tcore.composer import Composer
+from ..tcore.matcher import Matcher
+from ..tcore.iterator import Iterator
+from ..tcore.rewriter import Rewriter
+from ..tcore.resolver import Resolver
+
class ARule(Composer):
    '''
        Applies the transformation on one match.
    '''
    def __init__(self, LHS, RHS, sendAndApplyDeltaFunc):
        '''
            Applies the transformation on one match.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param RHS: The post-condition pattern (RHS).
            @param sendAndApplyDeltaFunc: Callback handed to the rewriter to apply deltas.
        '''
        super(ARule, self).__init__()
        self.M = Matcher(condition=LHS, max=1)
        self.I = Iterator(max_iterations=1)
        self.W = Rewriter(condition=RHS, sendAndApplyDeltaFunc=sendAndApplyDeltaFunc)

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # Pipeline: match, then pick the single match, then rewrite.
        # Stop and report at the first primitive that fails.
        for primitive in (self.M, self.I, self.W):
            packet = primitive.packet_in(packet)
            if not primitive.is_success:
                self.exception = primitive.exception
                return packet
        # All three primitives succeeded.
        self.is_success = True
        return packet
+
+
class ARule_r(ARule):
    '''
        Applies the transformation on one match, then resolves conflicts.
    '''
    def __init__(self, LHS, RHS, external_matches_only=False, custom_resolution=lambda packet: False, sendAndApplyDeltaFunc=None):
        '''
            Applies the transformation on one match.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param RHS: The post-condition pattern (RHS).
            @param external_matches_only: Resolve conflicts ignoring the matches found in this ARule.
            @param custom_resolution: Override the default resolution function.
            @param sendAndApplyDeltaFunc: Callback handed to the rewriter to apply deltas.
        '''
        # Bug fix: ARule.__init__ requires sendAndApplyDeltaFunc, but it was not
        # forwarded, so constructing an ARule_r always raised a TypeError.
        # Added as a trailing keyword parameter to stay backward-compatible.
        super(ARule_r, self).__init__(LHS, RHS, sendAndApplyDeltaFunc)
        self.R = Resolver(external_matches_only=external_matches_only,
                          custom_resolution=custom_resolution)

    def packet_in(self, packet):
        packet = super(ARule_r, self).packet_in(packet)
        if self.exception is None:
            # is_success is True: resolve any conflicts if necessary.
            packet = self.R.packet_in(packet)
            if not self.R.is_success:
                self.exception = self.R.exception
                return packet
            # Output success packet
        else:
            self.is_success = False
        return packet

+ 45 - 0
rules/brule.py

@@ -0,0 +1,45 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..tcore.composer import Composer
+from ..util.seeded_random import Random
+
+
class BRule(Composer):
    '''
        Selects a branch in which the matcher succeeds.
    '''
    def __init__(self, branches):
        '''
            Selects a branch in which the matcher succeeds.
            @param branches: A list of ARules.
        '''
        super(BRule, self).__init__()
        self.branches = branches

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        candidates = list(range(len(self.branches)))
        # hergin motif-integration: packet cloning commented out as apparently
        # unneeded; report bugs if any.
        #original = packet.clone()
        # Try branches in random order until one succeeds or one raises.
        while candidates:
            idx = Random.choice(candidates)
            chosen = self.branches[idx]
            packet = chosen.packet_in(packet)
            if chosen.is_success:
                self.is_success = True
                break
            if chosen.exception is not None:
                self.exception = chosen.exception
                break
            # This branch failed cleanly: never retry it.
            candidates.remove(idx)
            #packet = original.clone()
        return packet
+        

+ 39 - 0
rules/bsrule.py

@@ -0,0 +1,39 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from ..tcore.composer import Composer
+from .brule import BRule
+
+
class BSRule(Composer):
    '''
        Selects a branch in which the matcher succeeds, as long as matches can be found.
    '''
    def __init__(self, branches, max_iterations=INFINITY):
        '''
            Selects a branch in which the matcher succeeds, as long as matches can be found.
            @param branches: A list of ARules.
            @param max_iterations: The maximum number of times to apply the transformation.
        '''
        super(BSRule, self).__init__()
        self.brule = BRule(branches)
        self.max_iterations = max_iterations
        # NOTE(review): this counter is never reset in packet_in, so the
        # iteration budget is shared across successive packets — confirm
        # that is intended.
        self.iterations = 0

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # Keep re-applying the BRule until the budget runs out or a branch fails.
        while self.iterations < self.max_iterations:
            packet = self.brule.packet_in(packet)
            if not self.brule.is_success:
                self.exception = self.brule.exception
                return packet
            self.is_success = True
            self.iterations += 1
        return packet

+ 102 - 0
rules/crule.py

@@ -0,0 +1,102 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..tcore.composer import Composer
+from ...tconstants import TConstants as TC
+
class CRule(Composer):
    '''
        Executes a nested MoTif transformation by stepping through its rule
        contexts until the transformation signals completion.
    '''
    def __init__(self, motifContext):
        '''
            @param motifContext: The transformation context to execute.
        '''
        # Bug fix: the Composer base initializer was never invoked, unlike
        # every sibling rule class in this package.
        super(CRule, self).__init__()
        self._mtContexts = [motifContext]
        self.cruleEnd = False
        self.packet = None

    def packet_in(self, packet):
        self.cruleEnd = False
        self._mtContexts[-1]._expired = False
        self._mtContexts[-1]._lastStep = {}

        self.exception = None
        self.is_success = False
        self.packet = packet
        # Step until the nested transformation signals completion.
        while not self.cruleEnd:
            result = self._step()
            self.is_success = result if result is not None else False

        return self.packet

    def _nextRule(self):
        '''
            Advances the current context and returns the next executable rule
            step (a dict), the final {'trafoResult': ...} dict, or None when
            the step is neither.
        '''
        mtc = self._mtContexts[-1]
        ns = mtc.nextStep()

        if 'id' in ns:
            fulltype = mtc.t['nodes'][ns['id']]['$type']

            if fulltype == mtc.metamodel + "CRule":
                # Nested CRule nodes are currently skipped (loading disabled).
                #self._loadTransform(ns['rule'])
                return self._nextRule()

            elif fulltype.startswith('/Formalisms/__Transformations__/Transformation/T-Core') or \
                    fulltype.startswith('/Formalisms/__Transformations__/Transformation/MoTif'):
                return ns

        elif 'trafoResult' in ns:
            return ns

    def _step(self):
        '''
            Executes one rule step.
            @return: True on rule success, the final transformation result
                     when the transformation ends, or None on failure or
                     exception (cruleEnd is set whenever stepping must stop).
        '''
        def runRule(r):
            ar = r['rule']
            self.packet = ar.packet_in(self.packet)
            if ar.is_success:
                return (self.packet.deltas, TC.SUCCEEDED)
            if ar.exception:
                return (str(ar.exception), TC.EXCEPTION)
            return (None, TC.FAILED)

        try:
            nr = self._nextRule()
        except Exception:
            self.cruleEnd = True
            return

        # Bug fix: _nextRule may return None; membership tests on None raised
        # an uncaught TypeError before. Reject non-dict results first.
        if nr.__class__ != {}.__class__:
            self.cruleEnd = True
            return

        if 'trafoResult' in nr:
            self.cruleEnd = True
            return nr['trafoResult']

        if '$err' in nr:
            self.cruleEnd = True
            return

        (res, ai) = runRule(nr)
        self._mtContexts[-1].setLastStepApplicationInfo(ai)

        if ai == TC.FAILED:
            pass
            #self._aswPrintReq(TC.RULE_FAILURE_MSG+" ("+self._mtContexts[-1]._lastStep['alias']+":"+self._mtContexts[-1]._lastStep['name']+")")
        elif ai == TC.EXCEPTION:
            pass
            #self._aswPrintReq(TC.RULE_EXCEPTION_MSG + res)
        else:
            return True
            #self._mtContexts[-1].setLastStepFeedbackReceived()

+ 110 - 0
rules/frule.py

@@ -0,0 +1,110 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from .arule import ARule
+from ..tcore.resolver import Resolver
+
+
class FRule(ARule):
    '''
        Applies the transformation on all matches found.
    '''
    def __init__(self, LHS, RHS, max_iterations=INFINITY, sendAndApplyDeltaFunc=None):
        '''
            Applies the transformation on all matches found.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param RHS: The post-condition pattern (RHS).
            @param max_iterations: The maximum number of times to apply the rule.
        '''
        super(FRule, self).__init__(LHS, RHS, sendAndApplyDeltaFunc)
        # Unlike ARule, the matcher and iterator must cover every match.
        self.M.max = max_iterations
        self.I.max_iterations = max_iterations

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # Find all matches, then select the first one.
        for primitive in (self.M, self.I):
            packet = primitive.packet_in(packet)
            if not primitive.is_success:
                self.exception = primitive.exception
                return packet
        # Rewrite each selected match in turn.
        rewriting = True
        while rewriting:
            packet = self.W.packet_in(packet)
            if not self.W.is_success:
                self.exception = self.W.exception
                return packet
            packet = self.I.next_in(packet)
            if not self.I.is_success:
                rewriting = False
                # Exhausting the iterator is the normal success exit.
                if self.I.exception:
                    self.exception = self.I.exception
                else:
                    self.is_success = True
        return packet
+
+
class FRule_r(ARule):
    '''
        Applies the transformation on all matches found, resolving conflicts
        after each rewrite.
    '''
    def __init__(self, LHS, RHS, max_iterations=INFINITY, external_matches_only=False, custom_resolution=lambda packet: False, sendAndApplyDeltaFunc=None):
        '''
            Applies the transformation on all matches found.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param RHS: The post-condition pattern (RHS).
            @param max_iterations: The maximum number of times to apply the rule.
            @param external_matches_only: Resolve conflicts ignoring the matches found in this FRule.
            @param custom_resolution: Override the default resolution function.
            @param sendAndApplyDeltaFunc: Callback handed to the rewriter to apply deltas.
        '''
        # Bug fix: max_iterations was previously forwarded into ARule's
        # sendAndApplyDeltaFunc slot, and the matcher/iterator limits were left
        # at 1, so only a single match could ever be rewritten.
        super(FRule_r, self).__init__(LHS, RHS, sendAndApplyDeltaFunc)
        self.M.max = max_iterations
        self.I.max_iterations = max_iterations
        self.R = Resolver(external_matches_only=external_matches_only,
                          custom_resolution=custom_resolution)

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # Match
        packet = self.M.packet_in(packet)
        if not self.M.is_success:
            self.exception = self.M.exception
            return packet
        # Choose the first match
        packet = self.I.packet_in(packet)
        if not self.I.is_success:
            self.exception = self.I.exception
            return packet
        while True:
            # Rewrite
            packet = self.W.packet_in(packet)
            if not self.W.is_success:
                self.exception = self.W.exception
                return packet
            # Resolve any conflicts if necessary
            packet = self.R.packet_in(packet)
            if not self.R.is_success:
                self.exception = self.R.exception
                return packet
            # Choose another match
            packet = self.I.next_in(packet)
            # No more iterations are left
            if not self.I.is_success:
                if self.I.exception:
                    self.exception = self.I.exception
                else:
                    # Output success packet
                    self.is_success = True
                return packet

+ 89 - 0
rules/lfrule.py

@@ -0,0 +1,89 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from .lrule import LRule
+from ..tcore.rewriter import Rewriter
+from ..tcore.resolver import Resolver
+
class LFRule(LRule):
    '''
        Applies an inner rule for each application of the outer rule.
    '''
    def __init__(self, LHS, RHS, inner_rule, outer_first, sendAndApplyDeltaFunc, max_iterations=INFINITY, external_matches_only=False, custom_resolution=lambda packet: False):
        '''
            Applies an inner rule for each application of the outer rule.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param RHS: The post-condition pattern (RHS).
            @param inner_rule: The rule to apply in the loop.
            @param outer_first: Whether the outer rule should be applied before the inner rule.
            @param sendAndApplyDeltaFunc: Callback handed to the rewriter to apply deltas.
            @param max_iterations: The maximum number of matches of the LHS.
            @param external_matches_only: Resolve conflicts ignoring the matches found in this rule.
            @param custom_resolution: Override the default resolution function.
        '''
        super(LFRule, self).__init__(LHS, inner_rule, max_iterations)
        self.W = Rewriter(condition=RHS, sendAndApplyDeltaFunc=sendAndApplyDeltaFunc)
        self.outer_first = outer_first
        self.R = Resolver(external_matches_only=external_matches_only,
                          custom_resolution=custom_resolution)

    def _rewrite(self, packet):
        '''
            Runs the outer rewrite step.
            @return: (packet, success_flag); on failure self.exception is set.
        '''
        packet = self.W.packet_in(packet)
        if not self.W.is_success:
            self.exception = self.W.exception
            return packet, False
        # hergin :: motif-integration: conflict resolution commented out.
        #packet = self.R.packet_in(packet)
        #if not self.R.is_success:
        #    self.exception = self.R.exception
        #    return packet, False
        return packet, True

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # Find the matches, then select the first one.
        for primitive in (self.M, self.I):
            packet = primitive.packet_in(packet)
            if not primitive.is_success:
                self.exception = primitive.exception
                return packet

        while True:
            # Outer rewrite before the inner rule, if requested.
            if self.outer_first:
                packet, ok = self._rewrite(packet)
                if not ok:
                    return packet
            # Apply the inner rule.
            packet = self.inner_rule.packet_in(packet)
            if not self.inner_rule.is_success:
                self.exception = self.inner_rule.exception
                return packet
            # Outer rewrite after the inner rule, otherwise.
            if not self.outer_first:
                packet, ok = self._rewrite(packet)
                if not ok:
                    return packet
            # Choose another match.
            packet = self.I.next_in(packet)
            if not self.I.is_success:
                # Exhausting the iterator is the normal success exit.
                if self.I.exception:
                    self.exception = self.I.exception
                else:
                    self.is_success = True
                return packet

+ 69 - 0
rules/lqsrule.py

@@ -0,0 +1,69 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from .lrule import LRule
+
class LQSRule(LRule):
    '''
        Applies an inner rule for each match of the LHS as long as matches can be found.
    '''
    def __init__(self, LHS, inner_rule, max_iterations=INFINITY):
        '''
            Applies an inner rule for each match of the LHS as long as matches can be found.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param inner_rule: The rule to apply in the loop.
            @param max_iterations: The maximum number of matches of the LHS.
        '''
        super(LQSRule, self).__init__(LHS, inner_rule, max_iterations)

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # Find the matches, then select the first one.
        for primitive in (self.M, self.I):
            packet = primitive.packet_in(packet)
            if not primitive.is_success:
                self.exception = primitive.exception
                return packet

        while True:
            # Apply the inner rule.
            packet = self.inner_rule.packet_in(packet)
            if not self.inner_rule.is_success:
                if self.inner_rule.exception:
                    self.exception = self.inner_rule.exception
                return packet
            # At least one application happened: the rule as a whole succeeded.
            self.is_success = True

            if self.I.iterations == self.I.max_iterations:
                return packet

            # hergin :: motif-integration: match-set cleanup before re-match
            # disabled (LHS-only rules have no rewriter).
            #packet.match_sets = {}
            #try:
            #    if  len(packet.match_sets[self.I.condition].matches) == 0:
            #        del packet.match_sets[self.I.condition]
            #except KeyError:
            #    pass

            # Re-match from scratch.
            packet = self.M.packet_in(packet)
            if not self.M.is_success:
                self.exception = self.M.exception
                return packet

            # Choose another match; stop when none are left.
            packet = self.I.next_in(packet)
            if not self.I.is_success:
                if self.I.exception:
                    self.exception = self.I.exception
                return packet

+ 62 - 0
rules/lrule.py

@@ -0,0 +1,62 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from ..tcore.composer import Composer
+from ..tcore.matcher import Matcher
+from ..tcore.iterator import Iterator
+
+
class LRule(Composer):
    '''
        Applies an inner rule for each match of the LHS.
    '''
    def __init__(self, LHS, inner_rule, max_iterations=INFINITY):
        '''
            Applies an inner rule for each match of the LHS.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param inner_rule: The rule to apply in the loop.
            @param max_iterations: The maximum number of matches of the LHS.
        '''
        super(LRule, self).__init__()
        self.M = Matcher(condition=LHS, max=max_iterations)
        self.I = Iterator(max_iterations=max_iterations)
        self.inner_rule = inner_rule

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # Find the matches, then select the first one.
        for primitive in (self.M, self.I):
            packet = primitive.packet_in(packet)
            if not primitive.is_success:
                self.exception = primitive.exception
                return packet

        while True:
            # Apply the inner rule.
            packet = self.inner_rule.packet_in(packet)
            if not self.inner_rule.is_success:
                if self.inner_rule.exception:
                    self.exception = self.inner_rule.exception
                return packet

            # Clean the packet: required since there is no Rewriter in a Query.
            if not packet.match_sets[self.I.condition].matches:
                del packet.match_sets[self.I.condition]

            # Choose another match; stop when none are left.
            packet = self.I.next_in(packet)
            if not self.I.is_success:
                if self.I.exception:
                    self.exception = self.I.exception
                else:
                    # Exhausting the iterator is the normal success exit.
                    self.is_success = True
                return packet

+ 156 - 0
rules/lsrule.py

@@ -0,0 +1,156 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from .lrule import LRule
+from ..tcore.rewriter import Rewriter
+from ..tcore.resolver import Resolver
+
+
class LSRule(LRule):
    '''
        Applies an inner rule for each application of the outer rule as long as matches can be found.
    '''
    def __init__(self, LHS, RHS, inner_rule, outer_first, sendAndApplyDeltaFunc, max_iterations=INFINITY):
        '''
            Applies an inner rule for each application of the outer rule as long as matches can be found.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param RHS: The post-condition pattern (RHS).
            @param inner_rule: The rule to apply in the loop.
            @param outer_first: Whether the outer rule should be applied before the inner rule.
            @param sendAndApplyDeltaFunc: Callback handed to the rewriter to apply deltas.
            @param max_iterations: The maximum number of matches of the LHS.
        '''
        super(LSRule, self).__init__(LHS, inner_rule, max_iterations)
        self.W = Rewriter(condition=RHS, sendAndApplyDeltaFunc=sendAndApplyDeltaFunc)
        self.outer_first = outer_first

    def _outer_rewrite(self, packet):
        '''
            Runs the outer rewrite step.
            @return: (packet, success_flag); on failure self.exception is set.
        '''
        packet = self.W.packet_in(packet)
        if not self.W.is_success:
            self.exception = self.W.exception
            return packet, False
        return packet, True

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # Find the matches, then select the first one.
        for primitive in (self.M, self.I):
            packet = primitive.packet_in(packet)
            if not primitive.is_success:
                self.exception = primitive.exception
                return packet

        while True:
            # Outer rewrite before the inner rule, if requested.
            if self.outer_first:
                packet, ok = self._outer_rewrite(packet)
                if not ok:
                    return packet
            # Apply the inner rule.
            packet = self.inner_rule.packet_in(packet)
            if not self.inner_rule.is_success:
                self.exception = self.inner_rule.exception
                return packet
            # Outer rewrite after the inner rule, otherwise.
            if not self.outer_first:
                packet, ok = self._outer_rewrite(packet)
                if not ok:
                    return packet
            # At least one application happened: success either way.
            self.is_success = True
            if self.I.iterations == self.I.max_iterations:
                return packet
            # Re-match from scratch.
            packet = self.M.packet_in(packet)
            if not self.M.is_success:
                self.exception = self.M.exception
                return packet
            # Choose another match; stop when none are left.
            packet = self.I.next_in(packet)
            if not self.I.is_success:
                if self.I.exception:
                    self.exception = self.I.exception
                return packet
+
+
+
class LSRule_r(LSRule):
    '''
        Applies an inner rule for each application of the outer rule as long
        as matches can be found, resolving conflicts after each rewrite.
    '''
    def __init__(self, LHS, RHS, inner_rule=None, outer_first=True, sendAndApplyDeltaFunc=None, max_iterations=INFINITY, external_matches_only=False, custom_resolution=lambda packet: False):
        '''
            Applies an inner rule for each application of the outer rule as long as matches can be found.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param RHS: The post-condition pattern (RHS).
            @param inner_rule: The rule to apply in the loop.
            @param outer_first: Whether the outer rule should be applied before the inner rule.
            @param sendAndApplyDeltaFunc: Callback handed to the rewriter to apply deltas.
            @param max_iterations: The maximum number of matches of the LHS.
            @param external_matches_only: Resolve conflicts ignoring the matches found in this rule.
            @param custom_resolution: Override the default resolution function.
        '''
        # Bug fix: LSRule.__init__ requires (LHS, RHS, inner_rule, outer_first,
        # sendAndApplyDeltaFunc); it was called with no arguments, so every
        # construction of LSRule_r raised a TypeError. The new parameters are
        # defaulted, keeping existing (LHS, RHS, ...) call sites valid.
        super(LSRule_r, self).__init__(LHS, RHS, inner_rule, outer_first,
                                       sendAndApplyDeltaFunc, max_iterations)
        self.R = Resolver(external_matches_only=external_matches_only,
                          custom_resolution=custom_resolution)

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # Match
        packet = self.M.packet_in(packet)
        if not self.M.is_success:
            self.exception = self.M.exception
            return packet
        # Choose the first match
        packet = self.I.packet_in(packet)
        if not self.I.is_success:
            self.exception = self.I.exception
            return packet
        while True:
            if self.outer_first:
                # Rewrite
                packet = self.W.packet_in(packet)
                if not self.W.is_success:
                    self.exception = self.W.exception
                    return packet
                # Resolve any conflicts if necessary
                packet = self.R.packet_in(packet)
                if not self.R.is_success:
                    self.exception = self.R.exception
                    return packet
            # Apply the inner rule
            packet = self.inner_rule.packet_in(packet)
            if not self.inner_rule.is_success:
                self.exception = self.inner_rule.exception
                return packet
            if not self.outer_first:
                # Rewrite
                packet = self.W.packet_in(packet)
                if not self.W.is_success:
                    self.exception = self.W.exception
                    return packet
                # Resolve any conflicts if necessary
                packet = self.R.packet_in(packet)
                if not self.R.is_success:
                    self.exception = self.R.exception
                    return packet
            # Rule has been applied once, so it's a success anyway
            self.is_success = True
            if self.I.iterations == self.I.max_iterations:
                return packet
            # Re-Match
            packet = self.M.packet_in(packet)
            if not self.M.is_success:
                self.exception = self.M.exception
                return packet
            # Choose another match
            packet = self.I.next_in(packet)
            # No more iterations are left
            if not self.I.is_success:
                if self.I.exception:
                    self.exception = self.I.exception
                return packet

+ 54 - 0
rules/ndarule.py

@@ -0,0 +1,54 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..tcore.composer import Composer
+from ..tcore.matcher import Matcher
+from ..tcore.iterator import Iterator
+from ..tcore.rewriter import Rewriter
+from ..tcore.resolver import Resolver
+
+
class NDARule(Composer):
    '''
        Applies the transformation on one non-deterministically chosen match.
    '''
    def __init__(self, LHS, RHS, rng, sendAndApplyDeltaFunc, ignore_resolver=False, external_matches_only=False,
                 custom_resolution=lambda packet: False):
        '''
            Applies the transformation on one match.
            @param LHS: The pre-condition pattern (LHS + NACs).
            @param RHS: The post-condition pattern (RHS).
            @param rng: Random number generator handed to the iterator for match selection.
            @param ignore_resolver: Specifies whether or not a resolver is needed.
            @param external_matches_only: Resolve conflicts ignoring the matches found in this ARule.
            @param custom_resolution: Override the default resolution function.
        '''
        super(NDARule, self).__init__()
        self.ignore_resolver = ignore_resolver
        self.M = Matcher(condition=LHS)
        self.I = Iterator(max_iterations=1, rng=rng)
        self.W = Rewriter(condition=RHS, sendAndApplyDeltaFunc=sendAndApplyDeltaFunc)
        self.R = Resolver(external_matches_only=external_matches_only,
                          custom_resolution=custom_resolution)

    def packet_in(self, packet):
        self.exception = None
        self.is_success = False
        # NOTE(review): self.R is constructed but never invoked here — confirm
        # whether resolution was meant to run after the rewrite.
        # Pipeline: match, pick one match, rewrite; stop at the first failure.
        for primitive in (self.M, self.I, self.W):
            packet = primitive.packet_in(packet)
            if not primitive.is_success:
                self.exception = primitive.exception
                return packet
        self.is_success = True
        return packet

+ 157 - 0
rules/query.py

@@ -0,0 +1,157 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..tcore.composer import Composer
+from ..tcore.matcher import Matcher
+from ..tcore.iterator import Iterator
+
+
+class Query(Composer):
+    '''
+        Finds a match for the LHS without rewriting anything.
+    '''
+    def __init__(self, LHS):
+        '''
+            Finds a match for the LHS.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+        '''
+        super(Query, self).__init__()
+        # max=1: a single match is enough to answer the query.
+        self.M = Matcher(condition=LHS, max=1)
+        self.I = Iterator(max_iterations=1)
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            self.exception = self.M.exception
+            return packet
+        # Choose the only match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            # Clean the packet: required since there is no Rewriter in a Query
+            # (a Rewriter would normally consume the match set).
+            if  len(packet.match_sets[self.I.condition].matches) == 0:
+                del packet.match_sets[self.I.condition]
+            self.exception = self.I.exception
+            return packet
+        # Output success packet
+        self.is_success = True
+        return packet
+
+class CQuery2(Composer):
+    '''
+        Composite query: succeeds on the first LHS match for which the inner
+        query fails (negative-application semantics over two levels).
+    '''
+    def __init__(self, LHS, innerQuery):
+        '''
+            Finds a match for the LHS.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param innerQuery: Query evaluated on each candidate match; this
+                               CQuery2 succeeds when innerQuery fails.
+        '''
+        super(CQuery2, self).__init__()
+        self.M = Matcher(condition=LHS)
+        self.I = Iterator()
+        self.innerQuery=innerQuery
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            self.exception = self.M.exception
+            return packet
+
+        # Choose the first match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            self.exception = self.I.exception
+            return packet
+
+        while True:
+
+            packet = self.innerQuery.packet_in(packet)
+            if self.innerQuery.is_success:
+                if self.innerQuery.exception:
+                    self.exception = self.innerQuery.exception
+                    return packet
+
+                # Inner query succeeded on this match: try the next one.
+                # Choose another match
+                packet = self.I.next_in(packet)
+                # No more iterations are left
+                if not self.I.is_success:
+                    if self.I.exception:
+                        self.exception = self.I.exception
+                    else:
+                        # All matches exhausted without the inner query
+                        # failing: the composite query fails.
+                        self.is_success = False
+                    return packet
+            else:
+                # Inner query failed on this match: composite query succeeds.
+                self.is_success=True
+                return packet
+
+class CQuery3(Composer):
+    '''
+        Composite query over three levels: succeeds on the first LHS match
+        for which both inner queries fail.
+    '''
+    def __init__(self, LHS, innerQuery, secondInnerQuery):
+        '''
+            Finds a match for the LHS.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param innerQuery: Query evaluated first on each candidate match.
+            @param secondInnerQuery: Query evaluated only when innerQuery fails;
+                                     this CQuery3 succeeds when it fails too.
+        '''
+        super(CQuery3, self).__init__()
+        self.M = Matcher(condition=LHS)
+        self.I = Iterator()
+        self.innerQuery=innerQuery
+        self.secondInnerQuery=secondInnerQuery
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            self.exception = self.M.exception
+            return packet
+
+        # Choose the first match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            self.exception = self.I.exception
+            return packet
+
+        while True:
+
+            packet = self.innerQuery.packet_in(packet)
+            if self.innerQuery.is_success:
+                if self.innerQuery.exception:
+                    self.exception = self.innerQuery.exception
+                    return packet
+
+                # First inner query succeeded: this match is rejected.
+                # Choose another match
+                packet = self.I.next_in(packet)
+                # No more iterations are left
+                if not self.I.is_success:
+                    if self.I.exception:
+                        self.exception = self.I.exception
+                    else:
+                        # All matches exhausted: the composite query fails.
+                        self.is_success = False
+                    return packet
+            else:
+
+                # First inner query failed: check the second one.
+                packet = self.secondInnerQuery.packet_in(packet)
+                if self.secondInnerQuery.is_success:
+                    if self.secondInnerQuery.exception:
+                        self.exception = self.secondInnerQuery.exception
+                        return packet
+                    # Second inner query succeeded: reject this match too.
+                    packet = self.I.next_in(packet)
+                    if not self.I.is_success:
+                        if self.I.exception:
+                            self.exception = self.I.exception
+                        else:
+                            self.is_success = False
+                        return packet
+                else:
+                    # Both inner queries failed: composite query succeeds.
+                    self.is_success=True
+                    return packet

+ 31 - 0
rules/sequence.py

@@ -0,0 +1,31 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..tcore.composer import Composer
+
+
+class Sequence(Composer):
+    '''
+        Applies each rule in the order provided.
+    '''
+    def __init__(self, rules):
+        '''
+            Applies each rule in the order provided.
+            @param rules: The rules to apply.
+        '''
+        super(Sequence, self).__init__()
+        self.rules = rules
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        for rule in self.rules:
+            packet = rule.packet_in(packet)
+            # Discard intermediate match data before the next rule runs.
+            packet.clean()
+            if not rule.is_success:
+                # Stop at the first failing rule; propagate its exception, if any.
+                if rule.exception is not None:
+                    self.exception = rule.exception
+                return packet
+        # Every rule applied successfully.
+        self.is_success = True
+        return packet

+ 119 - 0
rules/srule.py

@@ -0,0 +1,119 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from .arule import ARule
+from ..tcore.resolver import Resolver
+
+
+class SRule(ARule):
+    '''
+        Applies the transformation as long as matches can be found.
+    '''
+    def __init__(self, LHS, RHS, max_iterations=INFINITY,sendAndApplyDeltaFunc=None):
+        '''
+            Applies the transformation as long as matches can be found.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param RHS: The post-condition pattern (RHS).
+            @param max_iterations: The maximum number of times to apply the transformation.
+            @param sendAndApplyDeltaFunc: Callback forwarded (via ARule) to the Rewriter
+                                          to apply the resulting delta.
+        '''
+        super(SRule, self).__init__(LHS, RHS,sendAndApplyDeltaFunc)
+        self.I.max_iterations = max_iterations
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            self.exception = self.M.exception
+            return packet
+        # Choose the first match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            self.exception = self.I.exception
+            return packet
+
+        while True:
+            # Rewrite
+            packet = self.W.packet_in(packet)
+            if not self.W.is_success:
+                self.exception = self.W.exception
+                return packet
+
+            # Rule has been applied once, so it's a success anyway
+            self.is_success = True
+            if self.I.iterations == self.I.max_iterations:
+                return packet
+            # Re-Match
+            packet = self.M.packet_in(packet)
+            if not self.M.is_success:
+                # No further match: the rule still succeeded (it applied at
+                # least once); propagate any matcher exception.
+                self.exception = self.M.exception
+                return packet
+            # Choose another match
+            packet = self.I.next_in(packet)
+            # No more iterations are left
+            if not self.I.is_success:
+                if self.I.exception:
+                    self.exception = self.I.exception
+                return packet
+
+
+class SRule_r(SRule):
+    '''
+        Applies the transformation as long as matches can be found,
+        resolving any conflicts after each rewrite.
+    '''
+    def __init__(self, LHS, RHS, max_iterations=INFINITY, external_matches_only=False, custom_resolution=lambda packet: False):
+        '''
+            Applies the transformation as long as matches can be found.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param RHS: The post-condition pattern (RHS).
+            @param max_iterations: The maximum number of times to apply the transformation.
+            @param external_matches_only: Resolve conflicts ignoring the matches found in this SRule.
+            @param custom_resolution: Override the default resolution function.
+        '''
+        # NOTE(review): sendAndApplyDeltaFunc is not forwarded to SRule here, so the
+        # Rewriter receives None -- confirm this is intended.
+        super(SRule_r, self).__init__(LHS, RHS, max_iterations)
+        self.R = Resolver(external_matches_only=external_matches_only,
+                          custom_resolution=custom_resolution)
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            self.exception = self.M.exception
+            return packet
+        # Choose the first match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            self.exception = self.I.exception
+            return packet
+        while True:
+            # Rewrite
+            packet = self.W.packet_in(packet)
+            if not self.W.is_success:
+                self.exception = self.W.exception
+                return packet
+            # Resolve any conflicts if necessary
+            packet = self.R.packet_in(packet)
+            if not self.R.is_success:
+                self.exception = self.R.exception
+                return packet
+            # Rule has been applied once, so it's a success anyway
+            self.is_success = True
+            if self.I.iterations == self.I.max_iterations:
+                return packet
+            # Re-Match
+            packet = self.M.packet_in(packet)
+            if not self.M.is_success:
+                # No further match: the rule still succeeded (it applied at
+                # least once); propagate any matcher exception.
+                self.exception = self.M.exception
+                return packet
+            # Choose another match
+            packet = self.I.next_in(packet)
+            # No more iterations are left
+            if not self.I.is_success:
+                if self.I.exception:
+                    self.exception = self.I.exception
+                return packet

+ 159 - 0
rules/xfrule.py

@@ -0,0 +1,159 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from .frule import FRule
+from ..tcore.rollbacker import Rollbacker
+from ..tcore.resolver import Resolver
+
+
+class XFRule(FRule):
+    '''
+        Applies the transformation on all matches found with roll-back capability.
+    '''
+    def __init__(self, LHS, RHS, max_iterations=INFINITY):
+        '''
+            Applies the transformation on all matches found with roll-back capability.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param RHS: The post-condition pattern (RHS).
+            @param max_iterations: The maximum number of times to apply the rule.
+        '''
+        super(XFRule, self).__init__(LHS, RHS, max_iterations)
+        # max_iterations=1 because not all matches have been exhausted after the first application
+        self.B = Rollbacker(condition=LHS, max_iterations=1)
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Checkpoint the original packet
+        self.B.packet_in(packet)
+        if not self.B.is_success:
+            self.exception = self.B.exception
+            return packet
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            # Roll back to the checkpoint; prefer the matcher's exception over
+            # any raised during restoration.
+            packet = self.B.restore(packet)
+            if self.M.exception:
+                self.exception = self.M.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        # Choose the first match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            packet = self.B.restore(packet)
+            if self.I.exception:
+                self.exception = self.I.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        while True:
+            # Rewrite
+            packet = self.W.packet_in(packet)
+            if not self.W.is_success:
+                packet = self.B.restore(packet)
+                if self.W.exception:
+                    self.exception = self.W.exception
+                elif self.B.exception:
+                    self.exception = self.B.exception
+                return packet
+            # Choose another match
+            packet = self.I.next_in(packet)
+            # No more iterations are left
+            if not self.I.is_success:
+                if self.I.exception:
+                    packet = self.B.restore(packet)
+                    if self.B.exception:
+                        self.exception = self.B.exception
+                    # The iterator's exception takes precedence.
+                    self.exception = self.I.exception
+                    self.is_success = False
+                else:
+                    # Output success packet
+                    self.is_success = True
+                return packet
+
+    def next_in(self, packet):
+        # Only one roll-back: delegate directly to the Rollbacker.
+        self.exception = None
+        self.is_success = False
+        packet = self.B.next_in(packet)
+        if not self.B.is_success:
+            self.exception = self.B.exception
+        return packet
+
+
+class XFRule_r(XFRule):
+    '''
+        Applies the transformation on all matches found with roll-back capability,
+        resolving any conflicts after each rewrite.
+    '''
+    def __init__(self, LHS, RHS, max_iterations=INFINITY, external_matches_only=False, custom_resolution=lambda packet: False):
+        '''
+            Applies the transformation on all matches found with roll-back capability.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param RHS: The post-condition pattern (RHS).
+            @param max_iterations: The maximum number of times to apply the rule.
+            @param external_matches_only: Resolve conflicts ignoring the matches found in this FRule.
+            @param custom_resolution: Override the default resolution function.
+        '''
+        super(XFRule_r, self).__init__(LHS, RHS, max_iterations)
+        self.R = Resolver(external_matches_only=external_matches_only,
+                          custom_resolution=custom_resolution)
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Checkpoint the original packet
+        self.B.packet_in(packet)
+        if not self.B.is_success:
+            self.exception = self.B.exception
+            return packet
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            packet = self.B.restore(packet)
+            if self.M.exception:
+                self.exception = self.M.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        # Choose the first match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            packet = self.B.restore(packet)
+            if self.I.exception:
+                self.exception = self.I.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        while True:
+            # Rewrite
+            packet = self.W.packet_in(packet)
+            if not self.W.is_success:
+                packet = self.B.restore(packet)
+                if self.W.exception:
+                    self.exception = self.W.exception
+                elif self.B.exception:
+                    self.exception = self.B.exception
+                return packet
+            # Resolve any conflicts if necessary
+            packet = self.R.packet_in(packet)
+            if not self.R.is_success:
+                # NOTE(review): unlike the other failure branches, no roll-back is
+                # performed when resolution fails -- confirm this is intended.
+                self.exception = self.R.exception
+                return packet
+            # Choose another match
+            packet = self.I.next_in(packet)
+            # No more iterations are left
+            if not self.I.is_success:
+                if self.I.exception:
+                    packet = self.B.restore(packet)
+                    if self.B.exception:
+                        self.exception = self.B.exception
+                    # The iterator's exception takes precedence.
+                    self.exception = self.I.exception
+                    self.is_success = False
+                else:
+                    # Output success packet
+                    self.is_success = True
+                return packet
+
+

+ 142 - 0
rules/xrule.py

@@ -0,0 +1,142 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from .arule import ARule
+from ..tcore.rollbacker import Rollbacker
+from ..tcore.resolver import Resolver
+
+
+class XRule(ARule):
+    '''
+        Applies the transformation on one match with roll-back capability.
+    '''
+    def __init__(self, LHS, RHS, max_iterations=INFINITY):
+        '''
+            Applies the transformation on one match with roll-back capability.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param RHS: The post-condition pattern (RHS).
+            @param max_iterations: The maximum number of times to apply the transformation.
+        '''
+        # NOTE(review): no Resolver is configured in XRule (see XRule_r); the
+        # historical remark about external_matches_only=True -- "further matches of
+        # this rule are only processed after a roll-back" -- refers to that subclass.
+        super(XRule, self).__init__(LHS, RHS)
+        self.M.max = max_iterations
+        self.I.max_iterations = max_iterations
+        self.B = Rollbacker(condition=LHS, max_iterations=max_iterations)
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Checkpoint the original packet
+        self.B.packet_in(packet)
+        if not self.B.is_success:
+            self.exception = self.B.exception
+            return packet
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            # Roll back to the checkpoint; prefer the matcher's exception over
+            # any raised during restoration.
+            packet = self.B.restore(packet)
+            if self.M.exception:
+                self.exception = self.M.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        # Choose one match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            packet = self.B.restore(packet)
+            if self.I.exception:
+                self.exception = self.I.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        # Rewrite
+        packet = self.W.packet_in(packet)
+        if not self.W.is_success:
+            packet = self.B.restore(packet)
+            if self.W.exception:
+                self.exception = self.W.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        self.is_success = True
+        return packet
+
+    def next_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Roll back one step before trying the next match.
+        packet = self.B.next_in(packet)
+        if not self.B.is_success:
+            self.exception = self.B.exception
+            return packet
+        # Choose the next match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            packet = self.B.next_in(packet)
+            if self.I.exception:
+                self.exception = self.I.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        # Rewrite
+        packet = self.W.packet_in(packet)
+        if not self.W.is_success:
+            packet = self.B.next_in(packet)
+            if self.W.exception:
+                self.exception = self.W.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        # Output success packet
+        self.is_success = True
+        return packet
+
+
+class XRule_r(XRule):
+    '''
+        Applies the transformation on one match with roll-back capability,
+        resolving any conflicts after the rewrite.
+    '''
+    def __init__(self, LHS, RHS, max_iterations=INFINITY, external_matches_only=False, custom_resolution=lambda packet: False):
+        '''
+            Applies the transformation on one match with roll-back capability.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param RHS: The post-condition pattern (RHS).
+            @param max_iterations: The maximum number of times to apply the transformation.
+            @param external_matches_only: Resolve conflicts ignoring the matches found in this ARule.
+            @param custom_resolution: Override the default resolution function.
+        '''
+        super(XRule_r, self).__init__(LHS, RHS, max_iterations)
+        self.R = Resolver(external_matches_only=external_matches_only,
+                          custom_resolution=custom_resolution)
+
+    def packet_in(self, packet):
+        packet = super(XRule_r, self).packet_in(packet)
+        # NOTE(review): this guard only checks for the absence of an exception;
+        # if the base rule failed without raising (e.g. no match), the resolver
+        # still runs -- confirm whether 'if self.is_success' was intended.
+        if self.exception is None:
+            # Resolve any conflicts if necessary
+            packet = self.R.packet_in(packet)
+            if not self.R.is_success:
+                self.exception = self.R.exception
+                return packet
+            # Output success packet
+        else:
+            self.is_success = False
+        return packet
+
+    def next_in(self, packet):
+        packet = super(XRule_r, self).next_in(packet)
+        # NOTE(review): same guard caveat as in packet_in above.
+        if self.exception is None:
+            # Resolve any conflicts if necessary
+            packet = self.R.packet_in(packet)
+            if not self.R.is_success:
+                self.exception = self.R.exception
+                return packet
+            # Output success packet
+        else:
+            self.is_success = False
+        return packet
+
+

+ 170 - 0
rules/xsrule.py

@@ -0,0 +1,170 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.infinity import INFINITY
+from .srule import SRule
+from ..tcore.rollbacker import Rollbacker
+from ..tcore.resolver import Resolver
+
+
+class XSRule(SRule):
+    '''
+        Applies the transformation as long as matches can be found with roll-back capability.
+    '''
+    def __init__(self, LHS, RHS, max_iterations=INFINITY):
+        '''
+            Applies the transformation on all matches found with roll-back capability.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param RHS: The post-condition pattern (RHS).
+            @param max_iterations: The maximum number of times to match.
+        '''
+        super(XSRule, self).__init__(LHS, RHS, max_iterations)
+        # max_iterations=1 because no all matches have been exhausted after first application
+        self.B = Rollbacker(condition=LHS, max_iterations=1)
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Checkpoint the original packet
+        self.B.packet_in(packet)
+        if not self.B.is_success:
+            self.exception = self.B.exception
+            return packet
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            packet = self.B.restore()
+            if self.M.exception:
+                self.exception = self.M.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        # Choose the first match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            packet = self.B.restore()
+            if self.I.exception:
+                self.exception = self.I.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        while True:
+            # Rewrite
+            packet = self.W.packet_in(packet)
+            if not self.W.is_success:
+                packet = self.B.restore()
+                if self.W.exception:
+                    self.exception = self.W.exception
+                elif self.B.exception:
+                    self.exception = self.B.exception
+                return packet
+            # Rule has been applied once, so it's a success anyway
+            self.is_success = True
+            if self.I.iterations == self.I.max_iterations:
+                return packet
+            # Re-Match
+            packet = self.M.packet_in(packet)
+            if not self.M.is_success:
+                self.exception = self.M.exception
+                return packet
+            # Choose another match
+            packet = self.I.next_in(packet)
+            # No more iterations are left
+            if not self.I.is_success:
+                if self.I.exception:
+                    packet = self.B.restore()
+                    if self.B.exception:
+                        self.exception = self.B.exception
+                    self.exception = self.I.exception
+                    self.is_success = False
+                return packet
+
+    def next_in(self, packet):
+        # Only one roll-back
+        self.exception = None
+        self.is_success = False
+        packet = self.B.next_in(packet)
+        if not self.B.is_success:
+            self.exception = self.B.exception
+        return packet
+
+
+class XSRule_r(XSRule):
+    '''
+        Applies the transformation as long as matches can be found with roll-back capability.
+    '''
+    def __init__(self, LHS, RHS, max_iterations=INFINITY, external_matches_only=False, custom_resolution=lambda packet: False):
+        '''
+            Applies the transformation as long as matches can be found with roll-back capability.
+            @param LHS: The pre-condition pattern (LHS + NACs).
+            @param RHS: The post-condition pattern (RHS).
+            @param max_iterations: The maximum number of times to apply the rule.
+            @param external_matches_only: Resolve conflicts ignoring the matches found in this FRule.
+            @param custom_resolution: Override the default resolution function.
+        '''
+        super(XSRule_r, self).__init__(LHS, RHS, max_iterations)
+        self.R = Resolver(external_matches_only=external_matches_only,
+                          custom_resolution=custom_resolution)
+
+    def packet_in(self, packet):
+        self.exception = None
+        self.is_success = False
+        # Checkpoint the original packet
+        self.B.packet_in(packet)
+        if not self.B.is_success:
+            self.exception = self.B.exception
+            return packet
+        # Match
+        packet = self.M.packet_in(packet)
+        if not self.M.is_success:
+            packet = self.B.restore()
+            if self.M.exception:
+                self.exception = self.M.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        # Choose the first match
+        packet = self.I.packet_in(packet)
+        if not self.I.is_success:
+            packet = self.B.restore()
+            if self.I.exception:
+                self.exception = self.I.exception
+            elif self.B.exception:
+                self.exception = self.B.exception
+            return packet
+        while True:
+            # Rewrite
+            packet = self.W.packet_in(packet)
+            if not self.W.is_success:
+                packet = self.B.restore()
+                if self.W.exception:
+                    self.exception = self.W.exception
+                elif self.B.exception:
+                    self.exception = self.B.exception
+                return packet
+            # Resolve any conflicts if necessary
+            packet = self.R.packet_in(packet)
+            if not self.R.is_success:
+                self.exception = self.R.exception
+                return packet
+            # Rule has been applied once, so it's a success anyway
+            self.is_success = True
+            if self.I.iterations == self.I.max_iterations:
+                return packet
+            # Re-Match
+            packet = self.M.packet_in(packet)
+            if not self.M.is_success:
+                self.exception = self.M.exception
+                return packet
+            # Choose another match
+            packet = self.I.next_in(packet)
+            # No more iterations are left
+            if not self.I.is_success:
+                if self.I.exception:
+                    packet = self.B.restore()
+                    if self.B.exception:
+                        self.exception = self.B.exception
+                    self.exception = self.I.exception
+                    self.is_success = False
+                return packet

+ 0 - 0
tcore/__init__.py


+ 24 - 0
tcore/composer.py

@@ -0,0 +1,24 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from .composite_primitive import CompositePrimitive
+
+
+class Composer(CompositePrimitive):
+    '''
+        Encapsulates T-Core primitives.
+        Both packet_in & next_in methods must be overridden to provide meaningful behaviour.
+    '''
+    def __init__(self):
+        '''
+            Encapsulates T-Core primitives.
+            Both packet_in & next_in methods must be overridden to provide meaningful behaviour.
+        '''
+        super(Composer, self).__init__()
+
+    def packet_in(self, packet):
+        # Default behaviour: pass the packet through unchanged.
+        return packet
+
+    def next_in(self, packet):
+        # Default behaviour: pass the packet through unchanged.
+        return packet

+ 16 - 0
tcore/composite_primitive.py

@@ -0,0 +1,16 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from .primitive import Primitive
+
+# Abstract class
+class CompositePrimitive(Primitive):
+    '''
+        Abstract base for primitives built out of other T-Core primitives.
+        Subclasses must override packet_in and next_in.
+    '''
+    def __init__(self):
+        super(CompositePrimitive, self).__init__()
+
+    def packet_in(self, packet):
+        # Abstract: subclasses must provide the behaviour.
+        raise AttributeError('Method not implemented')
+
+    def next_in(self, packet):
+        # Abstract: subclasses must provide the behaviour.
+        raise AttributeError('Method not implemented')

+ 22 - 0
tcore/control_primitive.py

@@ -0,0 +1,22 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from .primitive import Primitive
+
+# Abstract class
class ControlPrimitive(Primitive):
    """
    Abstract base class for primitives that accumulate packets on a
    success channel and a failure channel.
    """

    def __init__(self):
        super(ControlPrimitive, self).__init__()
        self.success = []   # packets received on the success channel
        self.fail = []      # packets received on the failure channel

    def success_in(self, packet):
        # Abstract: handle a packet arriving on the success channel.
        raise AttributeError('Method not implemented')

    def fail_in(self, packet):
        # Abstract: handle a packet arriving on the failure channel.
        raise AttributeError('Method not implemented')

    def reset(self):
        """Drop all accumulated packets from both channels."""
        self.success = []
        self.fail = []

+ 72 - 0
tcore/iterator.py

@@ -0,0 +1,72 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.seeded_random import Random
+from ..util.infinity import INFINITY
+from .rule_primitive import RulePrimitive
+#from messages import TransformationException
+
+
class Iterator(RulePrimitive):
    '''
        Chooses randomly one match from the packet.
    '''
    def __init__(self, condition=None, max_iterations=INFINITY, rng=None):
        '''
            Selects one match from the packet.
            @param condition: The pre-condition pattern.
            @param max_iterations: The maximum number of times to select.
                                    By default, this is +INFINITY.
            @param rng: Optional random number generator; falls back to the
                        shared seeded Random when not provided.
        '''
        super(Iterator, self).__init__()
        self.max_iterations = max_iterations
        self.iterations = 0
        self.rng = rng
        # Always define the attribute: cancelIn and next_in read it even when
        # no condition was supplied (previously raised AttributeError).
        self.condition = condition.get_id() if condition else None

    def cancelIn(self, cancel):
        # Honour the cancellation only if this iterator's condition is not
        # explicitly excluded from it.
        if self.condition not in cancel.exclusions:
            super(Iterator, self).cancelIn(cancel)
            self.iterations = 0

    def packet_in(self, packet):
        '''
            Selects the first match of the packet's current match set.
            Succeeds only when a match set for the current condition exists.
        '''
        self.exception = None
        self.is_success = False
        if packet.current in packet.match_sets:
            self.condition = packet.current
            # Promote the selected match to be the match to rewrite
            packet.match_sets[packet.current].match2rewrite = self._choose(packet)
            self._globalize_pivots(packet)
            self.iterations = 1
            self.is_success = True
        return packet

    def next_in(self, packet):
        '''
            Selects another match for this iterator's condition, if any
            iterations are left. Removes the match set once it is exhausted.
        '''
        self.exception = None
        self.is_success = False
        packet.current = self.condition
        if self.iterations < self.max_iterations and packet.current in packet.match_sets:
            if len(packet.match_sets[self.condition].matches) == 0:
                # No matches remain: discard the exhausted match set.
                del packet.match_sets[self.condition]
                return packet
            # Promote the selected match to be the match to rewrite
            packet.match_sets[packet.current].match2rewrite = self._choose(packet)
            self._globalize_pivots(packet)
            self.iterations += 1
            self.is_success = True
        return packet

    def _choose(self, packet):
        # Choose a match from the current match set at random and remove it
        # from the list of matches.
        rng = self.rng if self.rng is not None else Random
        matches = packet.match_sets[packet.current].matches
        return matches.pop(rng.randint(0, len(matches) - 1))

    def _globalize_pivots(self, packet):
        """
            Puts all local pivots of the current match in the global pivots of the packet.
            Of course, local pivots have priority over global pivots.
        """
        local_pivots = packet.match_sets[packet.current].match2rewrite.local_pivots
        for p in local_pivots:
            packet.global_pivots[p] = local_pivots[p]

+ 173 - 0
tcore/matcher.py

@@ -0,0 +1,173 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+import sys
+from copy import deepcopy
+from ..util.infinity import INFINITY
+from ..core.match_algo import HimesisMatcher
+from ..core.himesis import HConstants as HC
+from .rule_primitive import RulePrimitive
+from .messages import MatchSet, Match, TransformationException
+
+if sys.version_info[0] >= 3:
+    from functools import reduce
+
class Matcher(RulePrimitive):
    '''
        Binds the source graph according to the pre-condition pattern.
    '''
    def __init__(self, condition, max=INFINITY):
        '''
            Binds the source graph according to the pre-condition pattern.
            @param condition: The pre-condition pattern.
            @param max: The maximum number of matches.
        '''
        super(Matcher, self).__init__()
        self.max = max
        self.condition = condition

    def __str__(self):
        # Inject the condition name after the class name, e.g.
        # "Matcher [<condition name>] <uuid>".
        s = super(Matcher, self).__str__()
        s = s.split(' ')
        s.insert(1, '[%s]' % self.condition.name)
        return reduce(lambda x, y: '%s %s' % (x,y), s)

    def packet_in(self, packet):
        '''
            Finds up to self.max matches of the pre-condition pattern in the
            packet's graph and appends them to the packet's match set for
            this condition. Succeeds only if at least one match was found.
            Any error during matching is wrapped in a TransformationException.
        '''
        self.exception = None
        self.is_success = False
        # Reuse the match set already stored for this condition, if any.
        if self.condition[HC.GUID] in packet.match_sets:
            matchSet = packet.match_sets[self.condition[HC.GUID]]
        else:
            matchSet = MatchSet()

        # Find the matches
        try:
            i = 1
            if i <= self.max:
                for mapping in self._match(packet.graph, packet.global_pivots):
                    # Convert the mapping to a Match object
                    match = Match()
                    match.from_mapping(mapping, packet.graph, self.condition)
                    matchSet.matches.append(match)
                    i += 1
                    if i > self.max:
                        # We don't need any more matches
                        break
        except Exception as e:
            self.is_success = False
            self.exception = TransformationException(e)
            self.exception.packet = packet
            self.exception.transformation_unit = self
            return packet

        # Store the match set in the packet only when matches were found
        if len(matchSet.matches) > 0:
            packet.match_sets[self.condition[HC.GUID]] = matchSet

        # Identify that this is the condition we are currently processing
        packet.current = self.condition[HC.GUID]

        # Success only if matches were found
        self.is_success = len(matchSet.matches) > 0
        return packet

    def _match(self, graph, pivots) :
        '''
            Matcher with pivots and (possibly) multiple NACs
            1. Verify that no unbound NAC has a match
            2. Let the "bridge" denote the biggest graph that is the intersection of the LHS and a NAC, among all NACs
            3. Match the common part between the LHS & the NAC, i.e., the "bridge"
            3.1 Continue the matching ensuring no occurrence of the NAC
            3.2. If a NAC is found, ignore the current bridge mapping
            3.3. Continue to find complete matches of the LHS,
                 given each partial match found in 3.1.
            3.4. For each valid match, verify that no occurrence of any remaining bound NAC is found,
                 given the mapping found in 3.3.

            This is a generator: it yields mappings {pattern node index:
            source node index} one at a time.
        '''
        pred1 = {}  # To optimize the matcher, since otherwise matcher will compute the predecessors of the source graph many times
        succ1 = {}  # To optimize the matcher, since otherwise matcher will compute the successors of the source graph many times

        # Cache the pivot nodes of the source graph
        # (deepcopy so the caller's pivots are not converted in place)
        pivots = deepcopy(pivots)
        pivots.to_source_node_indices(graph)

        #===================================================================
        # First process the NACs that are not bound to the LHS
        #===================================================================
        for NAC in self.condition.getUnboundNACs():
            # Look for a NAC match
            nacMatcher = HimesisMatcher(source_graph=graph, pattern_graph=NAC)
            # Convert the pivots
            nac_pivots = pivots.to_mapping(graph, NAC)
            try:
                for mapping in nacMatcher.match_iter(context=nac_pivots):
                    # Make the mapping into {...,NAClabel:graphIndex,...}
                    match = Match()
                    match.from_mapping(mapping, graph, self.condition)
                    if NAC[HC.MT_CONSTRAINT](match.to_label_mapping(graph), graph):
                        # An unbound NAC has been found: this pattern can never match
                        return
            except: raise
            finally: nacMatcher.reset_recursion_limit()
            # For further matching optimizations
            pred1 = nacMatcher.pred1
            succ1 = nacMatcher.succ1

        # Either there are no NACs, or there were only unbound NACs that do not match, so match the LHS now
        if not self.condition.hasBoundNACs():
            lhsMatcher = HimesisMatcher(source_graph=graph, pattern_graph=self.condition, pred1=pred1, succ1=succ1)
            # Convert the pivots
            lhs_pivots = pivots.to_mapping(graph, self.condition)
            try:
                for mapping in lhsMatcher.match_iter(context=lhs_pivots):
                    # Make the mapping into {...,LHSlabel:graphIndex,...}
                    match = Match()
                    match.from_mapping(mapping, graph, self.condition)
                    if self.condition[HC.MT_CONSTRAINT](match.to_label_mapping(graph), graph):
                        yield mapping
            except: raise
            finally: lhsMatcher.reset_recursion_limit()

            # The matching is complete
            return

        #===================================================================
        # Now process the NACs that have some nodes bound to the LHS
        #===================================================================

        # Continue the matching looking for the LHS now
        lhsMatcher = HimesisMatcher(source_graph=graph, pattern_graph=self.condition, pred1=pred1, succ1=succ1)
        # Augment the bridge mapping with the pivot mappings
        lhs_pivots = pivots.to_mapping(graph, self.condition)

        try:
            for mapping in lhsMatcher.match_iter(context=lhs_pivots):
                # Make the mapping into {...,LHSlabel:graphIndex,...}
                match = Match()
                match.from_mapping(mapping, graph, self.condition)
                if self.condition[HC.MT_CONSTRAINT](match.to_label_mapping(graph), graph):
                    # A match of the LHS is found: ensure that no remaining NAC do match
                    invalid = False
                    for NAC in self.condition.getBoundNACs():
                        # This mapping represents the mapping of the bridge of this NAC with the LHS
                        bridgeMapping = match.to_mapping(graph, NAC)

                        # Now continue the matching looking for a match of the corresponding NAC
                        nacMatcher = HimesisMatcher(source_graph=graph, pattern_graph=NAC, pred1=pred1, succ1=succ1)
                        for nac_mapping in nacMatcher.match_iter(context=bridgeMapping):
                            # Make the mapping into {...,NAClabel:graphIndex,...}
                            # NOTE(review): this rebinds the loop-local
                            # 'match'; the LHS mapping was already captured
                            # in 'mapping', so the yield below is unaffected.
                            match = Match()
                            match.from_mapping(nac_mapping, graph, NAC)
                            if NAC[HC.MT_CONSTRAINT](match.to_label_mapping(graph), graph):
                                # An occurrence of the NAC is found: current mapping is not valid
                                invalid = True
                                break
                        if invalid:
                            # An occurrence of the NAC was found: current mapping is not valid
                            break
                    else:
                        # Either there are no bound NACs or no occurrence of any bound NAC was found: current mapping is valid
                        yield mapping
        except: raise
        finally: lhsMatcher.reset_recursion_limit()

+ 341 - 0
tcore/messages.py

@@ -0,0 +1,341 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+import copy, traceback
+from ..core.himesis import Himesis
+
# Abstract class
class Message(object):
    """Marker base class for all messages exchanged between primitives."""
    pass
+
+
class TransformationException(Message, Exception):
    '''
        The model of an exception occurrence.
    '''
    class ExceptionStatus:
        # Life-cycle states of an exception occurrence.
        ACTIVE = 'active'
        HANDLING = 'handling'
        HANDLED = 'handled'

    def __init__(self, instance=None, msg=''):
        '''
            @param instance: The original (inner) exception object, if any.
            @param msg: A human-readable message. When empty, it defaults to
                        the inner exception's first argument (if available).
        '''
        # Initialize through this class so the full MRO (Message, Exception)
        # is honoured; the original called super(Exception, self).__init__(),
        # which skipped Exception's own initializer.
        super(TransformationException, self).__init__()
        self.inner_exception = instance
        self.msg = msg
        # Guard against exceptions raised with no arguments.
        if instance and msg == '' and self.inner_exception.args:
            self.msg = self.inner_exception.args[0]
        self.detail = ''
        if instance and len(self.inner_exception.args) > 1:
            self.detail = self.inner_exception.args[1]
        self.packet = None                      # the packet being processed when the error occurred
        self.start_time = 0
        self.end_time = 0
        self.status = TransformationException.ExceptionStatus.ACTIVE
        self.transformation_unit = None         # the primitive in which the error occurred
        self.transformation_context = None      # formatted traceback of the inner exception
        if instance:
            self.transformation_context = traceback.format_exc()
        # Pre-rendered diagnostic snapshot of the state at construction time.
        self.debug_msg = """%s: %s
Detail: %s
Status: %s
Start: %f  - End: %f
Packet: %s
Unit: %s
Context:%s
""" % (self.inner_exception.__class__.__name__, self.msg, self.detail,
       self.status, self.start_time, self.end_time, self.packet,
       self.transformation_unit, self.transformation_context)

    def __str__(self):
        #        return self.debug_msg
        return self.msg + '\n' + str(self.transformation_context)
+
+
class Cancel(Message):
    '''
        This message is used to cancel the current activity of a primitive.
    '''

    def __init__(self):
        # Primitives listed here are exempt from the cancellation.
        self.exclusions = []

    def __str__(self):
        return 'Cancel - exclusion = %s' % self.exclusions
+
+
+
class Packet(Message):
    '''
        Holds the current graph and the different matches.
    '''
    def __init__(self, graph=None):
        self.graph = graph               # the source graph
        self.deltas = []                 # holds the modifications produced by a rule    
        self.match_sets = {}             # holds the matches for each pre-condition pattern already matched
        self.current = None              # points to the guid identifying the current match set
        self.global_pivots = Pivots()    # {pivot name: source node guid}

    def __str__(self):
        ms = ''.join(['''
        %s: %s''' % (k, self.match_sets[k]) for k in sorted(self.match_sets)])
        if ms == '':
            ms = str(None)
        s = '''Packet (%s)
    graph: %s
    deltas: %s
    match_sets: %s
    pivots: %s''' % (self.current, self.graph, self.deltas, ms, self.global_pivots)
        return s

    def clone(self):
        '''
            Returns an independent copy: the graph is copied and the match
            sets are deep-copied, so mutating the clone does not affect
            this packet.
        '''
        cpy = Packet()
        cpy.graph = self.graph.copy()
        cpy.deltas = self.deltas[:]
        cpy.global_pivots = copy.copy(self.global_pivots)
        cpy.current = self.current
        cpy.match_sets = copy.deepcopy(self.match_sets)
        return cpy

    def copy_readonly(self):
        '''
            Returns a copy that SHARES the graph and the deltas list with
            this packet (only pivots and match sets are copied). Intended
            for consumers that will not modify the graph.
        '''
        cpy = Packet()
        cpy.graph = self.graph
        cpy.deltas = self.deltas
        cpy.global_pivots = copy.copy(self.global_pivots)
        cpy.current = self.current
        cpy.match_sets = copy.deepcopy(self.match_sets)
        return cpy

    def copy_state(self, conditionId):
        '''
            Returns a copy (with a copied graph) retaining only the match
            set of the given condition, if present.
        '''
        cpy = Packet()
        cpy.graph = self.graph.copy()
        cpy.deltas = self.deltas[:]
        cpy.global_pivots = copy.copy(self.global_pivots)
        cpy.current = self.current
        if conditionId in self.match_sets:
            cpy.match_sets = {conditionId: copy.copy(self.match_sets[conditionId])}
        return cpy

    def set_state(self, packet):
        '''
            Restores this packet's state from another packet. Note that the
            given packet's match sets are MERGED into this packet's, not
            replaced.
        '''
        self.graph = packet.graph
        self.deltas = packet.deltas
        self.global_pivots = packet.global_pivots
        self.current = packet.current
        if packet.match_sets is not None:
            self.match_sets.update(packet.match_sets)

    def clear_state(self):
        '''
            Resets everything except the graph.
        '''
        self.deltas = []
        self.match_sets = {}
        self.current = None
        self.global_pivots = Pivots()

    def __copy__(self):
        # copy.copy() deliberately produces the read-only (graph-sharing) copy.
        return self.copy_readonly()

    def __deepcopy__(self, memo):
        # NOTE(review): deepcopy intentionally degrades to the read-only
        # copy, so the graph is still shared — confirm callers expect this.
        return self.__copy__()

    #    def get_curr_matchset(self):
    #        return self.match_sets[self.current]
    #
    #    def get_match2rewrite(self, condition):
    #        return self.match_sets[condition].matches[self.match_sets[condition].match2rewrite]
    #
    #    def get_curr_match2rewrite(self):
    #        return self.match_sets[self.current].matches[self.match_sets[self.current].match2rewrite]
    #
    #    def remove_match2rewrite(self, condition):
    #        # Remove the match to rewrite
    #        del self.match_sets[condition].matches[self.match_sets[condition].match2rewrite]
    #        # If the corresponding match set has become empty, remove it too
    #        if len(self.match_sets[condition].matches) == 0:
    #            del self.match_sets[condition]
    #
    #    def get_local_pivots(self):
    #        return self.match_sets[self.current].matches[self.match_sets[self.current].match2rewrite].local_pivots

    def clean(self):
        '''
            Unflags dirty matches
        '''
        for cond in self.match_sets:
            for match in self.match_sets[cond].matches:
                match.clean(self)
+
+
class MatchSet:
    '''
        Holds the different matches of a pre-condition.
    '''

    def __init__(self):
        # The single match that was selected for rewriting, if any.
        self.match2rewrite = None
        # All matches found for the pre-condition pattern.
        self.matches = []           # TODO: should it be a generator?
    # TODO: Should we store all the matches and let the iterator explicitly choose one randomly? Or rely on the matching algorithm and save memory space?

    def __str__(self):
        return 'MatchSet (%s): %s' % (self.match2rewrite, self.matches)

    def __copy__(self):
        clone = MatchSet()
        clone.match2rewrite = self.match2rewrite
        clone.matches = [copy.copy(m) for m in self.matches]
        return clone

    def __deepcopy__(self, memo):
        clone = MatchSet()
        clone.match2rewrite = self.match2rewrite
        clone.matches = [copy.deepcopy(m) for m in self.matches]
        return clone
+
+
+
class Match(dict):
    '''
        Wraps the mapping from the label of a pre-condition pattern model element
        to the node index of the corresponding source model element.
    '''
    def __init__(self):
        super(Match, self).__init__()   # {pattern node label : source node guid}
        self.local_pivots = Pivots()    # {pivot name : source node guid}

    def __copy__(self):
        # Build the copy explicitly: the original copied a super() proxy
        # (copy.copy(super(Match, self))), which relies on unspecified
        # copy/pickle behaviour and is not guaranteed to yield a Match.
        cpy = Match()
        cpy.update(self)
        cpy.local_pivots = copy.copy(self.local_pivots)
        return cpy

    def __deepcopy__(self, memo):
        cpy = Match()
        # Register early so self-referencing structures terminate.
        memo[id(self)] = cpy
        for k, v in self.items():
            cpy[copy.deepcopy(k, memo)] = copy.deepcopy(v, memo)
        cpy.local_pivots = copy.deepcopy(self.local_pivots, memo)
        return cpy

    def is_dirty(self, packet):
        '''
            Determines whether a source model element is dirty.
            @param packet: The packet on which the mappings are bound.
            @return: True if any matched node is flagged dirty or was deleted.
        '''
        for v in self.values():
            node = packet.graph.get_node(v)
            node = packet.graph.vs[node]
            if node is not None:
                # Check dirty flag
                if Himesis.Constants.MT_DIRTY in node.attribute_names() and node[Himesis.Constants.MT_DIRTY]:
                    return True
            else:
                # It was deleted
                return True
        return False

    def clean(self, packet):
        '''
            Clears the dirty flag on every matched node that still exists.
        '''
        for v in self.values():
            node = packet.graph.get_node(v)
            node = packet.graph.vs[node]
            if node and Himesis.Constants.MT_DIRTY in node.attribute_names():
                node[Himesis.Constants.MT_DIRTY] = False

    def to_label_mapping(self, source_graph):
        '''
            Converts the match to a mapping dictionary {label: source node index}.
            @raise Exception: if a matched node no longer exists in the graph.
        '''
        mapping = {}
        for label in self.keys():
            try:
                sourceNode = source_graph.get_node(self[label])
            except KeyError:
                raise Exception('The matched node %s does not exist' % label)
            if sourceNode is not None:
                mapping[label] = sourceNode
            else:
                raise Exception('The matched node %s does not exist' % label)
        return mapping

    def to_mapping(self, source_graph, pattern_graph):
        '''
            Converts the match to a mapping dictionary {pattern node index: source node index}.
            Labels unknown to the pattern graph are silently skipped.
        '''
        mapping = {}
        for label in self.keys():
            patternNode = pattern_graph.get_node_with_label(label)
            if patternNode is not None:
                sourceNode = source_graph.get_node(self[label])
                mapping[patternNode] = sourceNode
        return mapping

    def from_mapping(self, mapping, source_graph, pattern_graph):
        '''
            Extracts all matches from a mapping dictionary {pattern node index: source node index}
            and adds them to this object in the form {pattern label: source node guid}.
            Relevant pivots are also extracted.
        '''
        for pattern_node in mapping:
            # Ignore indices beyond the pattern graph (e.g. context nodes).
            if pattern_node < pattern_graph.vcount():
                label = pattern_graph.vs[pattern_node][Himesis.Constants.MT_LABEL]
                guid = source_graph.vs[mapping[pattern_node]][Himesis.Constants.GUID]
                self[label] = guid

        self.local_pivots.from_mapping(mapping, source_graph, pattern_graph)
+
+
+
class Pivots(dict):
    '''
        Wraps the binding from a pivot name to a source model element.
    '''
    def __init__(self):
        super(Pivots, self).__init__()     # {pivot name : source node guid}
        # True once values have been converted from guids to node indices.
        self.has_source_node_indices = False

    def __copy__(self):
        # Build the copy explicitly: the original copied a super() proxy
        # (copy.copy(super(Pivots, self))), which relies on unspecified
        # copy/pickle behaviour and is not guaranteed to yield a Pivots.
        cpy = Pivots()
        cpy.update(self)
        cpy.has_source_node_indices = self.has_source_node_indices
        return cpy

    def __deepcopy__(self, memo):
        cpy = Pivots()
        # Register early so self-referencing structures terminate.
        memo[id(self)] = cpy
        for k, v in self.items():
            cpy[copy.deepcopy(k, memo)] = copy.deepcopy(v, memo)
        cpy.has_source_node_indices = self.has_source_node_indices
        return cpy

    def to_source_node_indices(self, source_graph):
        '''
            Replaces every guid value by the corresponding source node index.
        '''
        for p in self.keys():
            sourceNode = source_graph.get_node(self[p])
            self[p] = sourceNode
        self.has_source_node_indices = True

    def to_mapping(self, source_graph, pattern_graph):
        '''
            Converts the pivots to a mapping dictionary {pattern node index: source node index}.
        '''
        mapping = {}
        if not self.has_source_node_indices:
            # Values are still guids: resolve them through the source graph.
            for p in self.keys():
                patternNode = pattern_graph.get_pivot_in(p)
                if patternNode is not None:
                    sourceNode = source_graph.get_node(self[p])
                    mapping[patternNode] = sourceNode
        else:
            # Values are already node indices: use them directly.
            for p in self.keys():
                patternNode = pattern_graph.get_pivot_in(p)
                if patternNode is not None:
                    mapping[patternNode] = self[p]
        return mapping

    def from_mapping(self, mapping, source_graph, pattern_graph):
        '''
            Extracts all pivots from a mapping dictionary {pattern node index: source node index}
            and adds them to this object in the form {pivot name: source node guid}.
        '''
        for p in mapping:
            pivot = pattern_graph.get_pivot_out(p)
            if pivot is not None:
                guid = source_graph.vs[mapping[p]][Himesis.Constants.GUID]
                if guid is not None:
                    self[pivot] = guid
                else:
                    #TODO: This should be a TransformationLanguageSpecificException
                    raise Exception('The bound node has no Guid')
+
+
+# Define the nil packet
+NIL_PACKET = Packet()

+ 20 - 0
tcore/primitive.py

@@ -0,0 +1,20 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+import uuid
+
+
# Abstract class
class Primitive(object):
    """Base class for every T-Core primitive."""

    def __init__(self):
        # Whether the primitive's last action resulted in a success.
        self.is_success = False
        # Holds the exception object if one was raised.
        self.exception = None
        # Unique identifier of this primitive instance.
        self._id = uuid.uuid4()

    def cancelIn(self, cancel):
        """Reset the outcome flags in response to a Cancel message."""
        self.is_success = False
        self.exception = None

    def __str__(self):
        return '%s %s' % (str(self.__class__.__name__), self._id)

+ 61 - 0
tcore/resolver.py

@@ -0,0 +1,61 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from .rule_primitive import RulePrimitive
+from .messages import TransformationException
+
+
class Resolver(RulePrimitive):
    '''
        Detects & resolves any conflict between matches and rewritings.
    '''
    def __init__(self, external_matches_only=False, custom_resolution=lambda packet: False):
        '''
            Detects & resolves any conflict between matches.
            @param external_matches_only: Whether to only check for matches outside the current scope of the resolver.
                                    By default, this is False.
            @param custom_resolution: Function that defines how to resolve any conflict
                                    By default, this returns False.
        '''
        super(Resolver, self).__init__()
        self.external_matches_only = external_matches_only
        self.custom_resolution = custom_resolution

    def packet_in(self, packet):
        '''
            Scans every stored match for dirtiness; a dirty match that neither
            the custom nor the default resolution accepts is reported as a
            conflict via a TransformationException.
        '''
        self.exception = None
        self.is_success = False
        for cond in packet.match_sets:
            # Ignore the current match set when checking for conflicts with external matches only
            if self.external_matches_only and cond == packet.current:
                continue
            for match in packet.match_sets[cond].matches:
                if not match.is_dirty(packet):
                    continue
                # Try the custom resolution first, then the default one;
                # short-circuiting preserves the original call order.
                if self._custom_resolution(packet, match) or self._default_resolution(packet, match):
                    continue
                self.is_success = False
                # TODO: This should be an InconsistentUseException
                self.exception = TransformationException()
                self.exception.packet = packet
                self.exception.transformation_unit = self
                return packet
        # No conflicts are to be reported
        self.is_success = True
        return packet

    def _custom_resolution(self, packet, match):
        '''
            Applies the user-defined resolution function
        '''
        return self.custom_resolution(packet)

    def _default_resolution(self, packet, match):
        '''
            Attempts to resolve conservatively any conflicts
        '''
        return False

+ 69 - 0
tcore/rewriter.py

@@ -0,0 +1,69 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from .rule_primitive import RulePrimitive
+from .messages import TransformationException
+from ..core.himesis import Himesis
+from ...tconstants import TConstants as TC
+from ...utils import Utilities as utils
+
+import sys
+if sys.version_info[0] >= 3:
+    from functools import reduce
+
class Rewriter(RulePrimitive):
    '''
        Transforms the matched source model elements according to the specified post-condition pattern.
    '''
    def __init__(self, condition, sendAndApplyDeltaFunc):
        '''
            Transforms the bound graph of the source graph into what the specification of the post-condition pattern.
            @param condition: The the post-condition pattern.
            @param sendAndApplyDeltaFunc: callback applied to the deltas produced by the rewrite
        '''
        super(Rewriter, self).__init__()
        self.condition = condition
        self.sendAndApplyDeltaFunc = sendAndApplyDeltaFunc

    def __str__(self):
        # Inject the condition name after the class name.
        parts = super(Rewriter, self).__str__().split(' ')
        parts.insert(1, '[%s]' % self.condition.name)
        return ' '.join(parts)

    def packet_in(self, packet):
        '''
            Rewrites the selected match of the pre-condition, applies the
            resulting deltas, and removes the consumed match from the packet.
        '''
        self.exception = None
        self.is_success = False
        pre_guid = self.condition.pre[Himesis.Constants.GUID]
        if pre_guid not in packet.match_sets:
            # TODO: This should be a TransformationLanguageSpecificException 
            self.exception = TransformationException()
            self.exception.packet = packet
            return packet
        match = packet.match_sets[pre_guid].match2rewrite
        mapping = match.to_label_mapping(packet.graph)
        # Apply the transformation on the match
        try:
            self.condition.execute(packet, mapping)     # Sets dirty nodes as well
        except Exception as e:
            self.exception = TransformationException(e)
            self.exception.packet = packet
            self.exception.transformation_unit = self
            return packet

        # Remove the consumed match; drop the match set once it is empty.
        packet.match_sets[pre_guid].match2rewrite = None
        if len(packet.match_sets[pre_guid].matches) == 0:
            del packet.match_sets[pre_guid]

        ''' hergin :: motif-integration :: start '''
        self.sendAndApplyDeltaFunc(packet.deltas)
        ''' hergin :: motif-integration :: end '''

        self.is_success = True
        return packet

+ 95 - 0
tcore/rollbacker.py

@@ -0,0 +1,95 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+#import pickle, os
+from ..util.infinity import INFINITY
+from .iterator import Iterator
+from ..tcore.messages import TransformationException
+
+
class Rollbacker(Iterator):
    '''
        Provides back-tracking capacity.
    '''
    def __init__(self, condition, max_iterations=INFINITY):
        '''
            Selects one match from the packet.
            @param condition: The pre-condition pattern.
            @param max_iterations: The maximum number of times to select.
                                    By default, this is +INFINITY.
        '''
        super(Rollbacker, self).__init__(condition, max_iterations)
        self.checkpoints = []   # Stack (LIFO) of saved packet states

    def packet_in(self, packet):
        '''
            Saves a checkpoint of the incoming packet so it can be restored later.
            @param packet: The received packet.
            @return: The same packet, unmodified.
        '''
        self.exception = None
        self.is_success = False
        try:
            self.establish(packet)
            self.is_success = True
        except Exception as e:
            self.is_success = False
            self.exception = TransformationException(e)
            self.exception.packet = packet
            self.exception.transformation_unit = self
        self.iterations = 1
        return packet

    def next_in(self, packet):
        '''
            Restores the last checkpoint, unless the packet comes from the
            rollbacker's own scope, in which case it is passed through.
            @param packet: The received packet.
            @return: The (possibly restored) packet.
        '''
        self.exception = None
        self.is_success = False
        if self.iterations < self.max_iterations:
            # If it came from the same scope as the rollbacker, just pass it over
            if packet.current in packet.match_sets:
                self.iterations += 1
                self.is_success = True
                return packet
            try:
                packet.set_state(self.restore())
                self.is_success = True
            except Exception as e:
                self.is_success = False
                # Bug fix: this used to assign the misspelled 'self.excepion',
                # leaving self.exception as None and crashing on the next line.
                self.exception = TransformationException(e)
                self.exception.packet = packet
                self.exception.transformation_unit = self
            return packet
        else:   # self.iterations == self.max_iterations
            try:
                packet = self.restore()
                self.is_success = True
            except Exception:
                self.is_success = False
            return packet

    def establish(self, packet):
        '''
            Pushes a checkpoint of the packet's current state onto the stack.
        '''
        self.checkpoints.append(packet.copy_state(self.condition))

    def restore(self):
        '''
            Pops and returns the most recent checkpoint.
            @raise Exception: If there is no checkpoint to restore.
        '''
        if len(self.checkpoints) > 0:
            return self.checkpoints.pop()
        raise Exception('There are no checkpoints to restore')

    def discard(self):
        '''
            Drops the most recent checkpoint without restoring it.
            @raise Exception: If there is no checkpoint to discard.
        '''
        if len(self.checkpoints) > 0:
            del self.checkpoints[-1]
        else:
            # Bug fix: the exception used to be raised unconditionally,
            # even after a successful discard.
            raise Exception('There are no checkpoints to discard')

    def discard_all(self):
        '''
            Drops every checkpoint.
        '''
        self.checkpoints = []

+ 13 - 0
tcore/rule_primitive.py

@@ -0,0 +1,13 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from .primitive import Primitive
+
+# Abstract class
class RulePrimitive(Primitive):
    '''
        Abstract base class for rule-level transformation primitives.
        Concrete subclasses must override packet_in.
    '''
    def __init__(self):
        super(RulePrimitive, self).__init__()

    def packet_in(self, packet):
        # Abstract: subclasses are expected to override this method.
        raise AttributeError('Method not implemented')

+ 70 - 0
tcore/selector.py

@@ -0,0 +1,70 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.seeded_random import Random
+from .control_primitive import ControlPrimitive
+from .messages import Cancel, TransformationException, NIL_PACKET
+
+
class Selector(ControlPrimitive):
    '''
        Selects a packet randomly.
    '''
    def __init__(self):
        '''
            Selects a packet randomly.
        '''
        super(Selector, self).__init__()
        self.exclusions = []

    def success_in(self, packet):
        '''
            Receives a successful packet
        '''
        self.exception = None
        self.is_success = False
        self.success.append(packet)

    def fail_in(self, packet):
        '''
            Receives a failed packet
        '''
        self.exception = None
        self.is_success = False
        self.fail.append(packet)

    def reset(self):
        '''
            Clears the collected packets and the exclusion list.
        '''
        super(Selector, self).reset()
        self.exclusions = []

    def select(self):
        '''
            Selects a packet randomly from the success list.
            If the success list is empty, then from the fail list.
        '''
        self.exception = None
        self.is_success = False
        if self.success:
            self.is_success = True
            chosen = Random.choice(self.success)
            self.exclusions.append(chosen.current)
            return chosen
        if self.fail:
            return Random.choice(self.fail)
        #TODO: This should be a TransformationLanguageSpecificException
        self.exception = TransformationException('No packet was received')
        self.exception.packet = NIL_PACKET
        return NIL_PACKET

    def cancel(self):
        '''
            Produces a cancel event and resets its state
        '''
        event = Cancel()
        event.exclusions = self.exclusions
        self.reset()
        return event

+ 89 - 0
tcore/synchronizer.py

@@ -0,0 +1,89 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+from ..util.seeded_random import Random
+from .control_primitive import ControlPrimitive
+from .messages import TransformationException, NIL_PACKET
+
+
class Synchronizer(ControlPrimitive):
    '''
        Synchronizes all threads of execution by merging the packets.
    '''
    def __init__(self, threads=2, custom_merge=lambda packets: None):
        '''
            Synchronizes all threads of execution by merging the packets.
            @param threads: Specifies how many threads will be synchronized.
                            By default, this is 2.
            @param custom_merge: Function that defines how to merge the success packets.
                                By default, this returns None.
        '''
        # Bug fix: this was 'super(Synchronizer, self)' without the __init__()
        # call, so ControlPrimitive's state (success/fail lists) was never
        # initialized and success_in/fail_in crashed with AttributeError.
        super(Synchronizer, self).__init__()

        assert(threads >= 2)
        self.threads = threads
        self.custom_merge = custom_merge

    def success_in(self, packet):
        '''
            Receives a successful packet
        '''
        self.exception = None
        self.is_success = False
        self.success.append(packet)

    def fail_in(self, packet):
        '''
            Receives a failed packet
        '''
        self.exception = None
        self.is_success = False
        self.fail.append(packet)

    def _custom_merge(self):
        '''
            Applies the user-defined merge function
        '''
        return self.custom_merge(self.success)

    def _default_merge(self):
        '''
            Attempts to merge the packets conservatively.
            Not implemented yet: always returns None.
        '''
        return None

    def merge(self):
        '''
            Attempts to merge the packets into a single one, only if all threads had succeeded.
            @return: The merged packet on success; a failed packet or NIL_PACKET otherwise.
        '''
        self.exception = None
        self.is_success = False

        def failure():
            self.is_success = False
            self.exception = TransformationException()
            self.exception.packet = NIL_PACKET
            return NIL_PACKET

        if len(self.success) == self.threads:
            # Prefer the user-supplied merge; fall back to the default strategy.
            packet = self._custom_merge()
            if packet is None:
                packet = self._default_merge()
            if packet is None:
                return failure()
            self.is_success = True
            self.reset()
            return packet
        elif len(self.success) + len(self.fail) == self.threads:
            # All threads reported in, but at least one of them failed.
            self.is_success = False
            return Random.choice(self.fail)
        else:
            # Not all threads have reported yet.
            return failure()

+ 0 - 0
util/__init__.py


+ 109 - 0
util/infinity.py

@@ -0,0 +1,109 @@
+# -*- coding: Latin-1 -*-
+## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
+## ##
+# infinity.py   
+#                     --------------------------------
+#                            Copyright (c) 2005
+#                          Jean-Sébastien  BOLDUC
+#                             Hans  Vangheluwe
+#                       McGill University (Montréal)
+#                     --------------------------------
+#
+#  - Singleton class "Inf" and unique instance "INFINITY" --- 
+#    stands for infinity (to use in time advance function)
+#    
+## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
+## ##
+
+##  INFINITY OBJECT --- ADDED 04/04/2005
+##  more comparison operators -- HV 12/11/2006
+##
+##  mul and rmul added -- Eugene 14/11/2006
+## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
+## ##
+
class Infty(object):
    """Singleton class: the single instance "INFINITY" stands for infinity."""
    __instantiated = False

    def __init__(self):
        if Infty.__instantiated:
            raise NotImplementedError("singleton class already instantiated")
        # Bug fix: this previously assigned the misspelled instance attribute
        # '__instantiatiated', so the singleton guard never triggered. The
        # flag must be set on the class for the guard to see it next time.
        Infty.__instantiated = True

    def __deepcopy__(self, memo):
        # There is only one INFINITY; deep copies alias it.
        return self

    def __add__(self, other):
        """ INFINITY + x = INFINITY """
        return self

    def __sub__(self, other):
        """ INFINITY - x = INFINITY (if x != INFINITY), or NaN (if x == INFINITY) """
        if other == self:
            raise ValueError("INFINITY - INFINITY gives NaN (not defined)")
        return self

    def __mul__(self, other):
        """ INFINITY * x = INFINITY """
        return self

    def __radd__(self, other):
        """ x + INFINITY = INFINITY """
        return self

    def __rsub__(self, other):
        """ x - INFINITY = -INFINITY (if x != INFINITY), or NaN (if x == INFINITY) """
        if other == self:
            raise ValueError("INFINITY - INFINITY gives NaN (not defined)")
        raise ValueError("x - INFINITY gives MINUS_INFINITY (not defined)")

    def __rmul__(self, other):
        """ x * INFINITY = INFINITY """
        return self

    def __abs__(self):
        """ abs(INFINITY) = INFINITY -- absolute value """
        return self

    def __eq__(self, other):
        # Identity-based equality: nothing equals INFINITY but itself.
        return other is self

    def __ne__(self, other):
        return other is not self

    def __hash__(self):
        # Defining __eq__ without __hash__ would make INFINITY unhashable on
        # Python 3; an identity hash is consistent with identity equality.
        return id(self)

    def __lt__(self, other):
        """ INFINITY is never smaller than anything. """
        return False

    def __le__(self, other):
        """ INFINITY <= x only for x == INFINITY. """
        return other is self

    def __gt__(self, other):
        """ INFINITY > x for every x != INFINITY. """
        return other is not self

    def __ge__(self, other):
        """ INFINITY is at least as large as everything. """
        return True

    def __str__(self):
        return "+INFINITY"

# Instantiate singleton:    
INFINITY = Infty()

+ 24 - 0
util/seeded_random.py

@@ -0,0 +1,24 @@
+'''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
+Copyright 2011 by the AToMPM team and licensed under the LGPL
+See COPYING.lesser and README.md in the root of this project for full details'''
+
+import random
+
class SeededRandom(random.Random):
    '''
        Random class wrapper, providing a deterministically seeded random
        number generator shared through the module-level singleton "Random".
    '''
    __instantiated = False

    def __init__(self, seed=0):
        '''
            Singleton class: the single instance "Random" is the shared generator.
            @param seed: The seed for the generator. Defaults to 0 so that
                         runs are reproducible.
            @raise NotImplementedError: On any attempt to create a second instance.
        '''
        if SeededRandom.__instantiated:
            raise NotImplementedError("singleton class already instantiated")

        # Bug fix: this previously assigned the misspelled attribute
        # '__instantiatiated', so the singleton guard never triggered.
        SeededRandom.__instantiated = True
        random.Random.__init__(self)
        self.seed(seed)


Random = SeededRandom()