Browse Source

Merge branch 'master' of github.com:AToMPM/atompm

Bentley James Oakes 7 years ago
parent
commit
ffe9222099
68 changed files with 3840 additions and 1535 deletions
  1. 3 2
      README.md
  2. 1 1
      client/behavioursc_canvas.js
  3. 62 12
      client/connection_utils.js
  4. 4 0
      doc/overview.rst
  5. 0 235
      mt/___websocket.py
  6. 46 26
      mt/httpd.py
  7. 3 26
      mt/main.py
  8. 57 57
      mt/mtworker.py
  9. 28 28
      mt/ptcal/barrier.py
  10. 68 76
      mt/ptcal/compiler.py
  11. 60 60
      mt/ptcal/dapi.py
  12. 10 7
      mt/ptcal/dcal.py
  13. 112 112
      mt/ptcal/motifcontext.py
  14. 77 69
      mt/ptcal/petrinet.py
  15. 237 230
      mt/ptcal/ptcal.py
  16. 71 78
      mt/ptcal/pytcore/core/himesis.py
  17. 101 101
      mt/ptcal/pytcore/core/match_algo.py
  18. 6 6
      mt/ptcal/pytcore/rules/arule.py
  19. 2 2
      mt/ptcal/pytcore/rules/brule.py
  20. 4 4
      mt/ptcal/pytcore/rules/bsrule.py
  21. 17 17
      mt/ptcal/pytcore/rules/crule.py
  22. 4 4
      mt/ptcal/pytcore/rules/frule.py
  23. 6 6
      mt/ptcal/pytcore/rules/lfrule.py
  24. 7 7
      mt/ptcal/pytcore/rules/lqsrule.py
  25. 3 3
      mt/ptcal/pytcore/rules/lrule.py
  26. 6 6
      mt/ptcal/pytcore/rules/lsrule.py
  27. 1 1
      mt/ptcal/pytcore/rules/ndarule.py
  28. 14 14
      mt/ptcal/pytcore/rules/query.py
  29. 1 1
      mt/ptcal/pytcore/rules/sequence.py
  30. 5 5
      mt/ptcal/pytcore/rules/srule.py
  31. 4 4
      mt/ptcal/pytcore/rules/xfrule.py
  32. 5 5
      mt/ptcal/pytcore/rules/xrule.py
  33. 4 4
      mt/ptcal/pytcore/rules/xsrule.py
  34. 3 3
      mt/ptcal/pytcore/tcore/composer.py
  35. 3 3
      mt/ptcal/pytcore/tcore/composite_primitive.py
  36. 4 4
      mt/ptcal/pytcore/tcore/control_primitive.py
  37. 6 6
      mt/ptcal/pytcore/tcore/iterator.py
  38. 20 17
      mt/ptcal/pytcore/tcore/matcher.py
  39. 56 56
      mt/ptcal/pytcore/tcore/messages.py
  40. 3 3
      mt/ptcal/pytcore/tcore/primitive.py
  41. 5 5
      mt/ptcal/pytcore/tcore/resolver.py
  42. 12 9
      mt/ptcal/pytcore/tcore/rewriter.py
  43. 21 21
      mt/ptcal/pytcore/tcore/rollbacker.py
  44. 2 2
      mt/ptcal/pytcore/tcore/rule_primitive.py
  45. 7 7
      mt/ptcal/pytcore/tcore/selector.py
  46. 10 10
      mt/ptcal/pytcore/tcore/synchronizer.py
  47. 11 11
      mt/ptcal/pytcore/util/infinity.py
  48. 2 2
      mt/ptcal/pytcore/util/seeded_random.py
  49. 46 42
      mt/ptcal/synchgraph.py
  50. 11 11
      mt/ptcal/tconstants.py
  51. 28 28
      mt/ptcal/tcontext.py
  52. 52 52
      mt/ptcal/tcorecontext.py
  53. 17 12
      mt/ptcal/utils.py
  54. 3 0
      mt/websocket/README
  55. 29 0
      mt/websocket/__init__.py
  56. 447 0
      mt/websocket/_abnf.py
  57. 325 0
      mt/websocket/_app.py
  58. 52 0
      mt/websocket/_cookiejar.py
  59. 495 0
      mt/websocket/_core.py
  60. 87 0
      mt/websocket/_exceptions.py
  61. 180 0
      mt/websocket/_handshake.py
  62. 319 0
      mt/websocket/_http.py
  63. 75 0
      mt/websocket/_logging.py
  64. 126 0
      mt/websocket/_socket.py
  65. 44 0
      mt/websocket/_ssl_compat.py
  66. 163 0
      mt/websocket/_url.py
  67. 105 0
      mt/websocket/_utils.py
  68. 42 22
      mt/ws.py

+ 3 - 2
README.md

@@ -5,12 +5,13 @@ AToMPM stands for "A Tool for Multi-Paradigm Modeling". It is a research framewo
 ## Installation
 
 To install AToMPM, follow these steps:
-* Download and install the latest Python 2.7.X
+* Download and install the latest Python (2.7.X or 3.X.X)
     * Use a package manager on Linux
     * Or visit http://python.org/download/
 * Download and install python-igraph
     * Use the pip package manager (comes with Python)
         * `pip install python-igraph`
+        * If using Python3: `pip3 install python-igraph`
     * For Windows, you may need to install the compiled igraph core
         * `http://www.lfd.uci.edu/~gohlke/pythonlibs/#python-igraph`
 * Download and install node.js
@@ -32,7 +33,7 @@ To run AToMPM on Windows, execute the `run.bat` script inside of the main AToMPM
 ### Mac or Linux
 * Execute commands in different terminals
     1. Execute `node httpwsd.js` in the main AToMPM folder
-    2. Execute `python2 mt\main.py` in the main AToMPM folder
+    2. Execute `python mt\main.py` in the main AToMPM folder
     3. Open a browser (preferably Chrome) and navigate to http://localhost:8124/atompm
 
 * The above steps are automated by the `run_AToMPM_local.sh` script

+ 1 - 1
client/behavioursc_canvas.js

@@ -405,7 +405,7 @@ __canvasBehaviourStatechart = {
 			else if( this.__currentState == this.__STATE_DRAGGING_CONNECTION_PATH_CTRL_POINT )
 			{
 				if( name == __EVENT_MOUSE_MOVE ) {
-                    ConnectionUtils.previewControlPointTranslation(GUIUtils.convertToCanvasX(event), GUIUtils.convertToCanvasY(event));
+                    ConnectionUtils.previewControlPointTranslation(GUIUtils.convertToCanvasX(event), GUIUtils.convertToCanvasY(event), event.ctrlKey);
 				}
 				else if( name == __EVENT_LEFT_RELEASE_CTRL_POINT )
 				{

+ 62 - 12
client/connection_utils.js

@@ -200,18 +200,68 @@ ConnectionUtils = function(){
 		connectionPathEditingOverlay = {};
 		currentControlPoint = undefined;
 	};
-	
-	/**
-	 * Moves the control point and its overlay to the specified coordinates
-	 */
-	this.previewControlPointTranslation = function(x,y){
-		var _x = parseInt( currentControlPoint.node.getAttribute('_x') ),
-			 _y = parseInt( currentControlPoint.node.getAttribute('_y') );
-		currentControlPoint.translate(x-_x,y-_y);
-		currentControlPoint.node.setAttribute('_x',x);
-		currentControlPoint.node.setAttribute('_y',y);
-		ConnectionUtils.updateConnectionPath(true);
-	};
+
+    /**
+     * Moves the control point and its overlay to the specified coordinates
+     */
+    this.previewControlPointTranslation = function (x, y, ctrl_key_down) {
+
+        // if the control key is not down,
+        // restrict control point to within bounding box
+        if (!ctrl_key_down) {
+            let new_points = this.restrictControlPoint(x, y);
+            x = new_points[0];
+            y = new_points[1];
+        }
+
+        let _x = parseInt(currentControlPoint.node.getAttribute('_x')),
+            _y = parseInt(currentControlPoint.node.getAttribute('_y'));
+
+        currentControlPoint.translate(x - _x, y - _y);
+
+        currentControlPoint.node.setAttribute('_x', x);
+        currentControlPoint.node.setAttribute('_y', y);
+        ConnectionUtils.updateConnectionPath(true);
+    };
+
+    /**
+     * Restricts the control point to within an icon's bounding box
+     */
+    this.restrictControlPoint = function (x, y) {
+        let start = currentControlPoint.node.getAttribute("__start");
+        let end = currentControlPoint.node.getAttribute("__end");
+
+        // something went wrong, or we're not an
+		// outside edge, so return the points
+        if (start == undefined && end == undefined) {
+            return [x, y];
+        }
+
+        //get the bounding box rectangle
+        let icon = __getIcon(start || end);
+        let bbox = icon.getBBox();
+
+        //get the dimensions
+        let iconX = bbox.x;
+        let iconY = bbox.y;
+
+        let width = bbox.width;
+        let height = bbox.height;
+
+        //restrict x and y to within the bounding box
+        if (x < iconX) {
+            x = iconX;
+        } else if (x > iconX + width) {
+            x = iconX + width;
+        }
+
+        if (y < iconY) {
+            y = iconY;
+        } else if (y > iconY + height) {
+            y = iconY + height;
+        }
+        return [Math.round(x), Math.round(y)];
+    };
 	
 	/**
 	 * Show the connection path editing overlay. This shows draggable circles

+ 4 - 0
doc/overview.rst

@@ -226,6 +226,10 @@ When in the **EDGE EDITING** state,
 | Action                                | Shortcut(s)                                                                 |
 +=======================================+=============================================================================+
 | Move control point                    | Left-press any control point, drag it to desired position and release.      |
+|                                       |                                                                             |
+|                                       | If editing a control point attached to an icon, movement is restricted to   |
+|                                       | within that icon's bounding box. If free movement is desired,               |
+|                                       | hold CTRL while moving the control point.                                   |
 +---------------------------------------+-----------------------------------------------------------------------------+
 | Vertically/Horizontally align control | Left-click any control point and click TAB.                                 |
 | point to previous control point       |                                                                             |

+ 0 - 235
mt/___websocket.py

@@ -1,235 +0,0 @@
-"""
-REF:: https://github.com/mtah/python-websocket (+ /.setup/websocket.py.patch)
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with this program.  If not, see <http://www.gnu.org/licenses/>
-"""
-
-import sys, re, urlparse, socket, asyncore
-
-class WebSocket(object):
-    def __init__(self, url, **kwargs):
-        self.host, self.port, self.resource, self.secure = WebSocket._parse_url(url)
-        self.protocol = kwargs.pop('protocol', None)
-        self.cookie_jar = kwargs.pop('cookie_jar', None)
-        self.onopen = kwargs.pop('onopen', None)
-        self.onmessage = kwargs.pop('onmessage', None)
-        self.onerror = kwargs.pop('onerror', None)
-        self.onclose = kwargs.pop('onclose', None)
-        if kwargs: raise ValueError('Unexpected argument(s): %s' % ', '.join(kwargs.values()))
-
-        self._dispatcher = _Dispatcher(self)
-
-    def send(self, data):
-        self._dispatcher.write('\x00' + _utf8(data) + '\xff')
-
-    def close(self):
-        self._dispatcher.handle_close()
-
-    @classmethod
-    def _parse_url(cls, url):
-        p = urlparse.urlparse(url)
-
-        if p.hostname:
-            host = p.hostname
-        else:
-            raise ValueError('URL must be absolute')
-    
-        if p.fragment:
-            raise ValueError('URL must not contain a fragment component')
-    
-        if p.scheme == 'ws':
-            secure = False
-            port = p.port or 80
-        elif p.scheme == 'wss':
-            raise NotImplementedError('Secure WebSocket not yet supported')
-            # secure = True
-            # port = p.port or 443
-        else:
-            raise ValueError('Invalid URL scheme')
-
-        resource = p.path or u'/'
-        if p.query: resource += u'?' + p.query
-        return (host, port, resource, secure)
-
-    #@classmethod
-    #def _generate_key(cls):
-    #    spaces = random.randint(1, 12)
-    #    number = random.randint(0, 0xffffffff/spaces)
-    #    key = list(str(number*spaces))
-    #    chars = map(unichr, range(0x21, 0x2f) + range(0x3a, 0x7e))
-    #    random_inserts = random.sample(xrange(len(key)), random.randint(1,12))
-    #    for (i, c) in [(r, random.choice(chars)) for r in random_inserts]:
-    #        key.insert(i, c)
-    #    print key
-    #    return ''.join(key)
-
-class WebSocketError(Exception):
-    def _init_(self, value):
-        self.value = value
-
-    def _str_(self):
-        return str(self.value)
-
-class _Dispatcher(asyncore.dispatcher):
-    def __init__(self, ws):
-        asyncore.dispatcher.__init__(self)
-        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
-        self.connect((ws.host, ws.port))
-        
-        self.ws = ws
-        self._read_buffer = ''
-        self._write_buffer = ''
-        self._handshake_complete = False
-
-        if self.ws.port != 80:
-            hostport = '%s:%d' % (self.ws.host, self.ws.port)
-        else:
-            hostport = self.ws.host
-            
-        fields = [
-            'Upgrade: WebSocket',
-            'Connection: Upgrade',
-            'Host: ' + hostport,
-            'Origin: http://' + hostport,
-            #'Sec-WebSocket-Key1: %s' % WebSocket.generate_key(),
-            #'Sec-WebSocket-Key2: %s' % WebSocket.generate_key()
-        ]
-        if self.ws.protocol: fields['Sec-WebSocket-Protocol'] = self.ws.protocol
-        if self.ws.cookie_jar:
-            cookies = filter(lambda c: _cookie_for_domain(c, _eff_host(self.ws.host)) and \
-                             _cookie_for_path(c, self.ws.resource) and \
-                             not c.is_expired(), self.ws.cookie_jar)
-            
-            for cookie in cookies:
-                fields.append('Cookie: %s=%s' % (cookie.name, cookie.value))
-        
-        # key3 = ''.join(map(unichr, (random.randrange(256) for i in xrange(8))))
-        self.write(_utf8('GET %s HTTP/1.1\r\n' \
-                         '%s\r\n\r\n' % (self.ws.resource,
-                                         '\r\n'.join(fields))))
-                                         # key3)))
-
-    def handl_expt(self):
-        self.handle_error()
-
-    def handle_error(self):
-        self.close()
-        t, e, trace = sys.exc_info()
-        if self.ws.onerror:
-            self.ws.onerror(e)
-        else:
-            asyncore.dispatcher.handle_error(self)
-
-    def handle_close(self):
-        self.close()
-        if self.ws.onclose:
-            self.ws.onclose()
-
-    def handle_read(self):
-        if self._handshake_complete:
-            self._read_until('\xff', self._handle_frame)
-        else:
-            self._read_until('\r\n\r\n', self._handle_header)
-
-    def handle_write(self):
-        sent = self.send(self._write_buffer)
-        self._write_buffer = self._write_buffer[sent:]
-
-    def writable(self):
-        return len(self._write_buffer) > 0
-
-    def write(self, data):
-        self._write_buffer += data # TODO: separate buffer for handshake from data to
-                                   # prevent mix-up when send() is called before
-                                   # handshake is complete?
-
-    def _read_until(self, delimiter, callback):
-		  def lookForAndHandleCompletedFrame():
-			  pos = self._read_buffer.find(delimiter)
-			  if pos >= 0:
-				  pos += len(delimiter)
-				  data = self._read_buffer[:pos]
-				  self._read_buffer = self._read_buffer[pos:]
-				  if data:
-				     callback(data)
-				     lookForAndHandleCompletedFrame()
-
-		  self._read_buffer += self.recv(4096)
-		  lookForAndHandleCompletedFrame()
-
-    def _handle_frame(self, frame):
-        assert frame[-1] == '\xff'
-        if frame[0] != '\x00':
-            raise WebSocketError('WebSocket stream error')
-
-        if self.ws.onmessage:
-            self.ws.onmessage(frame[1:-1])
-        # TODO: else raise WebSocketError('No message handler defined')
-
-    def _handle_header(self, header):
-        assert header[-4:] == '\r\n\r\n'
-        start_line, fields = _parse_http_header(header)
-        if start_line != 'HTTP/1.1 101 Web Socket Protocol Handshake' or \
-           fields.get('Connection', None) != 'Upgrade' or \
-           fields.get('Upgrade', None) != 'WebSocket':
-            raise WebSocketError('Invalid server handshake')
-
-        self._handshake_complete = True
-        if self.ws.onopen:
-            self.ws.onopen()
-
-_IPV4_RE = re.compile(r'\.\d+$')
-_PATH_SEP = re.compile(r'/+')
-
-def _parse_http_header(header):
-    def split_field(field):
-        k, v = field.split(':', 1)
-        return (k, v.strip())
-
-    lines = header.strip().split('\r\n')
-    if len(lines) > 0:
-        start_line = lines[0]
-    else:
-        start_line = None
-        
-    return (start_line, dict(map(split_field, lines[1:])))
-
-def _eff_host(host):
-    if host.find('.') == -1 and not _IPV4_RE.search(host):
-        return host + '.local'
-    return host
-
-def _cookie_for_path(cookie, path):
-    if not cookie.path or path == '' or path == '/':
-        return True
-    path = _PATH_SEP.split(path)[1:]
-    cookie_path = _PATH_SEP.split(cookie.path)[1:]
-    for p1, p2 in map(lambda *ps: ps, path, cookie_path):
-        if p1 == None:
-            return True
-        elif p1 != p2:
-            return False
-
-    return True
-
-def _cookie_for_domain(cookie, domain):
-    if not cookie.domain:
-        return True
-    elif cookie.domain[0] == '.':
-        return domain.endswith(cookie.domain)
-    else:
-        return cookie.domain == domain
-
-def _utf8(s):
-    return s.encode('utf-8')

+ 46 - 26
mt/httpd.py

@@ -2,10 +2,18 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-import threading, urlparse
-from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
-from SocketServer import ThreadingMixIn
-from mtworker import mtworkerThread
+import sys
+if sys.version_info[0] < 3:
+	import threading, urlparse
+	from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+	from SocketServer import ThreadingMixIn
+	from mtworker import mtworkerThread
+else:
+	import threading
+	import urllib.parse as urlparse
+	from http.server import BaseHTTPRequestHandler, HTTPServer
+	from socketserver import ThreadingMixIn
+	from mtworker import mtworkerThread
 
 
 
@@ -44,29 +52,33 @@ class HTTPRequestHandler(BaseHTTPRequestHandler) :
 		#check for valid worker id
 		url   = urlparse.urlparse(self.path)
 		query = urlparse.parse_qs(url[4])
-		if query == '' or 'wid' not in query : 
+		if query == '' or 'wid' not in query :
 			return self._respond(400, 'missing worker id')
 
 		wid = query['wid'][0]
-		if wid not in mtw2msgQueue : 
+		if wid not in mtw2msgQueue :
 			return self._respond(400, 'invalid worker id :: '+wid)
 
 		#retrieve reqdata if any
 		reqData = None
 		if (self.command == 'PUT' or self.command == 'POST') :
-			dl = int(self.headers.getheader('Content-Length') or 0)
+			if sys.version_info < (3, 0):
+				header = self.headers.getheader('Content-Length')
+			else:
+				header = self.headers.get('Content-Length')
+			dl = int(header or 0)
 			if dl > 0 :
 				reqData = self.rfile.read(dl)
 
 		#setup lock and response objects + forward request to worker
-		self.lock 		 = threading.Condition()	
- 		self._response = {}		 
-  		self._postMessage(
-					wid,
-  					{'method':self.command,
- 					 'uri':self.path,
-  					 'reqData':reqData,
-  					 'resp':self})
+		self.lock 		 = threading.Condition()
+		self._response = {}
+		self._postMessage(
+			wid,
+			{'method':self.command,
+			 'uri':self.path,
+			 'reqData':reqData,
+			 'resp':self})
 
 		#wait on worker's response (necessary completing the execution of a do_*()
 		#causes an empty response to be sent)
@@ -75,10 +87,10 @@ class HTTPRequestHandler(BaseHTTPRequestHandler) :
 			self.lock.wait()
 		self.lock.release()
 		self._respond(
-				self._response['statusCode'],
-				self._response['reason'],
-				self._response['data'],
-				self._response['headers'])
+			self._response['statusCode'],
+			self._response['reason'],
+			self._response['data'],
+			self._response['headers'])
 
 
 
@@ -88,7 +100,7 @@ class HTTPRequestHandler(BaseHTTPRequestHandler) :
 				worker is currently using msgQueue)
 			2. add msg to it
 			3. release lock and notify worker that a new msg is available '''
-	def _postMessage(self,wid,msg) : 
+	def _postMessage(self,wid,msg) :
 		mtw2lock[wid].acquire()
 		mtw2msgQueue[wid].append(msg)
 		mtw2lock[wid].notify()
@@ -108,17 +120,25 @@ class HTTPRequestHandler(BaseHTTPRequestHandler) :
 		if headers == '' :
 			self.send_header('Content-Type','text/plain')
 		else :
-			for h,i in headers.iteritems() :
+			for h,i in headers.items() :
 				self.send_header(h,i)
 		self.end_headers()
 
 		if round(statusCode/100.0) != 2 :
 			if reason != '' :
+				if sys.version_info < (3, 0):
+					reason = bytes(reason)
+				else:
+					reason = bytes(reason, 'utf8')
 				self.wfile.write(reason)
-		else : 
+		else :
 			if data != '' :
+				if sys.version_info < (3, 0):
+					data = bytes(data)
+				else:
+					data = bytes(data, 'utf8')
 				self.wfile.write(data)
-		
+
 
 
 	'''
@@ -127,9 +147,9 @@ class HTTPRequestHandler(BaseHTTPRequestHandler) :
 		self._response['statusCode'] = msg['statusCode']
 
 		for x in ('reason','data','headers') :
-			if x in msg : 
+			if x in msg :
 				self._response[x] = msg[x]
-			else : 
+			else :
 				self._response[x] = ''
 
 
@@ -146,7 +166,7 @@ class HTTPServerThread(threading.Thread) :
 		self.httpd = MultiThreadedHTTPServer(('127.0.0.1', 8125), HTTPRequestHandler)
 		self.httpd.serve_forever()
 		self.httpd.socket.close()
-	
+
 
 	def stop(self) :
 		self.httpd.shutdown()

+ 3 - 26
mt/main.py

@@ -2,41 +2,18 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-import asyncore, logging, threading
+import logging
 from httpd import HTTPServerThread
-from ws import WebSocket
 
 
 '''
-	init and launch http server and asyncore loop + set logging level for mt/*
-	
-	NOTE: omitting asyncore.loop()'s first parameter ('timeout' according to the
-  			API) causes initialization to be very long... it seems it represents
-			the delay during which an asyncore loop may remain idle before giving
-			control to someone
-
-	NOTE: python-websocket is built on an event-loop called asyncore... this loop
-  		  	is started via asyncore.loop()... unfortunately, if there isn't already
-			something in the loop when it's started, it just terminates... hence, 
-			to enable the creation of WebSockets (which requires a running asyncore
-			loop) in each future mtworker, we create a dummy WebSocket which serves
-			only to keep the asyncore loop alive while there are no other open
-			WebSockets '''
+	init and launch http server + set logging level for mt/*
+'''
 def main() :
 	logging.basicConfig(format='%(levelname)s - %(message)s', level=logging.INFO)
-	dummy_ws = WebSocket()	
 	httpd = HTTPServerThread()
 	httpd.start()
 
-	try :
-		asyncore.loop(1)
-	except KeyboardInterrupt :
-#		print(threading.enumerate())
-		httpd.stop()
-		dummy_ws.close()
-		pass
-
-
 if __name__ == "__main__" :
 	main()
 

+ 57 - 57
mt/mtworker.py

@@ -47,8 +47,8 @@ class mtworkerThread(threading.Thread) :
 	def __init__(self,mtw2msgQueue,mtw2lock) :
 		threading.Thread.__init__(self)
 		self.wid 				  = str(mtworkerThread.nextID)
-	  	mtworkerThread.nextID  += 1
-		self._msgQueue 		  = [] 	
+		mtworkerThread.nextID  += 1
+		self._msgQueue 		  = []
 		mtw2msgQueue[self.wid] = self._msgQueue
 		self._lock 				  = threading.Condition()
 		mtw2lock[self.wid]	  = self._lock
@@ -81,7 +81,7 @@ class mtworkerThread(threading.Thread) :
 				 thread sleeps while self._msgQueue is empty '''
 	def run(self):
 		while not self._stopped :
-  			self._lock.acquire()
+			self._lock.acquire()
 
 			if len(self._msgQueue) == 0 :
 				self._lock.wait()
@@ -101,68 +101,68 @@ class mtworkerThread(threading.Thread) :
 		TBI:: the use of '127.0.0.1' implies that the atompm server is running on
 	  			the same machine as the transformation engine... '''
 	def _aswHttpReq(self,method,uri,data) :
-		return utils.httpReq( 
-								method, 
-								'127.0.0.1:8124', 
-								uri+'?wid='+self._aswid, 
-								data)
+		return utils.httpReq(
+			method,
+			'127.0.0.1:8124',
+			uri+'?wid='+self._aswid,
+			data)
 
 
 
 	'''
-		handle an incoming message from the server '''	
+		handle an incoming message from the server '''
 	def _onmessage(self,msg):
 		if msg == 'DIE' :
-			return self.stop()		
+			return self.stop()
 
-		logging.debug(self.wid+' >> #'+str(id(msg['resp']))+' '+\
-							msg['method']+' '+msg['uri'])
+		logging.debug(self.wid+' >> #'+str(id(msg['resp']))+' '+ \
+					  msg['method']+' '+msg['uri'])
 
 		if msg['method'] == 'PUT' and re.match('/aswSubscription',msg['uri']) :
 			if self._ws != None :
 				self._postMessage(
-						msg['resp'],
-						{'statusCode':403,
-  						 'reason':'already subscribed to an asworker'})
-			else : 
+					msg['resp'],
+					{'statusCode':403,
+					 'reason':'already subscribed to an asworker'})
+			else :
 				self._aswid = str(json.loads(msg['reqData'])['aswid'])
 				self._ptcal = PyTCoreAbstractionLayer(
-						{'httpReq':self._aswHttpReq, 'wid':self._aswid}, self.wid)
+					{'httpReq':self._aswHttpReq, 'wid':self._aswid}, self.wid)
 				try :
 					self._ws = WebSocket(self._ptcal)
-				except Exception, e :
+				except Exception as e :
 					self._postMessage(
 						msg['resp'],
 						{'statusCode':500,
-  						 'reason':str(e)})
+						 'reason':str(e)})
 
 				self._ws.subscribe(self._aswid)
 				def respond(resp) :
 					if self._ws.subscribed == False :
 						self._ws.close()
 						self._postMessage(
-									resp,
-									{'statusCode':500,
-									 'reason':'subscription to asworker failed'})
+							resp,
+							{'statusCode':500,
+							 'reason':'subscription to asworker failed'})
 					elif self._ws.subscribed == True :
 						self._postMessage(resp,{'statusCode':200})
 					else :
 						t = threading.Timer(0.5,respond,[resp])
 						t.start()
-				respond(msg['resp'])	
+				respond(msg['resp'])
 
 		elif msg['method'] == 'PUT' and re.match('/envvars',msg['uri']) :
 			if self._ptcal.username != None :
 				self._postMessage(
-						msg['resp'],
-						{'statusCode':403,
-						 'reason':'already provided environment variables'})
+					msg['resp'],
+					{'statusCode':403,
+					 'reason':'already provided environment variables'})
 			else :
 				reqData = json.loads(msg['reqData'])
 				self._ptcal.username   = reqData['username']
 				self._ptcal.defaultDCL = reqData['defaultDCL']
 				self._postMessage(msg['resp'],{'statusCode':200})
-				
+
 		elif msg['method'] == 'PUT' and re.match('/current.model',msg['uri']) :
 			m   = json.loads(msg['reqData'])['m']
 			mms = json.loads(msg['reqData'])['mms']
@@ -172,50 +172,50 @@ class mtworkerThread(threading.Thread) :
 
 		elif msg['method'] == 'PUT' and re.match('/current.transform',msg['uri']):
 			try :
-				if not self._ptcal.isStopped() : 
+				if not self._ptcal.isStopped() :
 					self._postMessage(
-							msg['resp'],
-							{'statusCode':403,
-							 'reason':'not allowed to (re)load during '+\
-							 			 'ongoing transformation(s)'})
+						msg['resp'],
+						{'statusCode':403,
+						 'reason':'not allowed to (re)load during '+ \
+								  'ongoing transformation(s)'})
 				else :
 					transfs = json.loads(msg['reqData'])['transfs']
 					transfs.reverse()
 					self._ptcal.loadTransforms(transfs)
 					self._postMessage(msg['resp'],{'statusCode':200})
-			except Exception, e :
+			except Exception as e :
 				self._postMessage(
-						msg['resp'],
-						{'statusCode':500,
-  						 'reason':"Error in model transformation worker: " + str(e)})
-				
+					msg['resp'],
+					{'statusCode':500,
+					 'reason':"Error in model transformation worker: " + str(e)})
+
 		elif msg['method'] == 'PUT' and re.match('/query.transform',msg['uri']):
 			try :
 				self._ptcal.processQuery(json.loads(msg['reqData']))
 				self._postMessage(msg['resp'],{'statusCode':200})
-			except Exception, e :
+			except Exception as e :
 				self._postMessage(
-						msg['resp'],
-						{'statusCode':500,
-  						 'reason':'There\'s something wrong with the query: '+str(e)})
+					msg['resp'],
+					{'statusCode':500,
+					 'reason':'There\'s something wrong with the query: '+str(e)})
 
 		elif msg['method'] == 'PUT' and re.match('^/execmode',msg['uri']) :
 			legalModes = ['play','stop','pause','step']
 			mode = json.loads(msg['reqData'])['mode']
 			if mode in legalModes :
-				if self._ptcal.isStopping() : 
+				if self._ptcal.isStopping() :
 					self._postMessage(
-							msg['resp'],
-							{'statusCode':503,
-							 'reason':'currently processing a STOP request'})
-				else : 
+						msg['resp'],
+						{'statusCode':503,
+						 'reason':'currently processing a STOP request'})
+				else :
 					self._postMessage(msg['resp'],{'statusCode':200})
 					getattr(self._ptcal,mode.lower())()
-			else :	
+			else :
 				self._postMessage(
-						msg['resp'],
-						{'statusCode':400,
-						 'reason':'invalid execution command :: '+mode})
+					msg['resp'],
+					{'statusCode':400,
+					 'reason':'invalid execution command :: '+mode})
 
 		elif msg['method'] == 'POST' and re.match('^/toggledebug',msg['uri']) :
 			self._ptcal.toggleDebugMode()
@@ -230,36 +230,36 @@ class mtworkerThread(threading.Thread) :
 			#self._ptcal.toggleDebugMode()
 			self._ptcal.analyzePN();
 			self._postMessage(msg['resp'],{'statusCode':204})
-		
+
 		#flat reachability analysis
 		elif msg['method'] == 'POST' and re.match('^/PNFull',msg['uri']) :
 			f = json.loads(msg['reqData'])['fname']
 			#self._ptcal.toggleDebugMode()
 			self._ptcal.PNFull(fname=f);
 			self._postMessage(msg['resp'],{'statusCode':204})
-			
+
 		elif msg['method'] == 'POST' and re.match('^/dotPN',msg['uri']) :
 			#self._ptcal.toggleDebugMode()
 			f = json.loads(msg['reqData'])['fname']
 			self._ptcal.PNFull(fname=f,dot=True);
 			self._postMessage(msg['resp'],{'statusCode':204})
-		
+
 		elif msg['method'] == 'POST' and re.match('^/bdapiresp',msg['uri']) :
 			resp = json.loads(msg['reqData'])
 			self._ptcal._queueBDAPI(resp)
 			self._postMessage(msg['resp'],{'statusCode':204})
-			
+
 		else :
 			self._postMessage(msg['resp'],{'statusCode':501})
 
 
 
 	'''
-		post response back to server '''			
+		post response back to server '''
 	def _postMessage(self,resp,msg) :
-		logging.debug(self.wid+' << #'+str(id(resp))+' '+str(msg))		
+		logging.debug(self.wid+' << #'+str(id(resp))+' '+str(msg))
 		resp.lock.acquire()
-		resp.setResponse(msg)		
+		resp.setResponse(msg)
 		resp.lock.notify()
 		resp.lock.release()
 

+ 28 - 28
mt/ptcal/barrier.py

@@ -7,31 +7,31 @@ from threading import *
 #Barrier implementation from here
 #From http://greenteapress.com/semaphores/index.html
 class barrier:
-   def __init__(self, n):
-     self.n = n
-     self.count = 0
-     self.mutex = Semaphore(1)
-     self.turnstile = Semaphore(0)
-     self.turnstile2 = Semaphore(0)
-  
-   def phase1(self):
-     self.mutex.acquire()
-     self.count += 1
-     if self.count == self.n:
-       for i in range(self.n):
-         self.turnstile.release()
-     self.mutex.release()
-     self.turnstile.acquire()
-  
-   def phase2(self):
-     self.mutex.acquire()
-     self.count -= 1
-     if self.count == 0:
-       for i in range(self.n):
-         self.turnstile2.release()
-     self.mutex.release()
-     self.turnstile2.acquire()
-  
-   def wait(self):
-     self.phase1()
-     self.phase2()
+    def __init__(self, n):
+        self.n = n
+        self.count = 0
+        self.mutex = Semaphore(1)
+        self.turnstile = Semaphore(0)
+        self.turnstile2 = Semaphore(0)
+
+    def phase1(self):
+        self.mutex.acquire()
+        self.count += 1
+        if self.count == self.n:
+            for i in range(self.n):
+                self.turnstile.release()
+        self.mutex.release()
+        self.turnstile.acquire()
+
+    def phase2(self):
+        self.mutex.acquire()
+        self.count -= 1
+        if self.count == 0:
+            for i in range(self.n):
+                self.turnstile2.release()
+        self.mutex.release()
+        self.turnstile2.acquire()
+
+    def wait(self):
+        self.phase1()
+        self.phase2()

+ 68 - 76
mt/ptcal/compiler.py

@@ -3,13 +3,13 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 import re, json, uuid, itertools
-from dcal import DesignerCodeAbstractionLayer
-from utils import Utilities as utils
-from tconstants import TConstants as TC
-from pytcore.core.himesis import Himesis, HConstants as HC
-from pytcore.core.himesis import HimesisPreConditionPatternLHS
-from pytcore.core.himesis import HimesisPreConditionPatternNAC
-from pytcore.core.himesis import HimesisPostConditionPattern
+from .dcal import DesignerCodeAbstractionLayer
+from .utils import Utilities as utils
+from .tconstants import TConstants as TC
+from .pytcore.core.himesis import Himesis, HConstants as HC
+from .pytcore.core.himesis import HimesisPreConditionPatternLHS
+from .pytcore.core.himesis import HimesisPreConditionPatternNAC
+from .pytcore.core.himesis import HimesisPostConditionPattern
 
 
 ''' 
@@ -37,7 +37,7 @@ class ModelAndRuleCompiler :
         self._username         = username
         self._aswid                 = aswid
         self._defaultDCL        = defaultDCL
-        self._subtypes         = {}        
+        self._subtypes         = {}
         self._connectorTypes    = set()
         self._knownMMs            = set()
         self._loadedMMs        = set()
@@ -48,13 +48,13 @@ class ModelAndRuleCompiler :
 
         self.RC__looseSubtypingMM = None
 
-    
+
     '''
         add a node to himesis graph 'hg' that reflects the contents of asworker
         node 'n' with id 'id' '''
     def addNode(self,hg,n,id) :
         newNodeIndex = \
-                 hg.add_node(n['$type'],n['$type'] in self._connectorTypes)
+            hg.add_node(n['$type'],n['$type'] in self._connectorTypes)
         hg.vs[newNodeIndex]['$atompmId'] = str(id)
         for attr in n :
             attr = str(attr)
@@ -66,8 +66,8 @@ class ModelAndRuleCompiler :
                 hg.vs[newNodeIndex][attr] = n[attr]['value']
         return newNodeIndex
 
-    
-    
+
+
 
     ''' 
         compile an atompm json model into a pytcore himesis graph 
@@ -101,18 +101,18 @@ class ModelAndRuleCompiler :
             for mm in m['metamodels'] :
                 if mm not in self._knownMMs :
                     mmData = utils.fread(
-                                            '/users/%s/%s.metamodel'%(self._username,mm))
+                        '/users/%s/%s.metamodel'%(self._username,mm))
                     self.parseMetamodel(mm,mmData)
-    
+
         hg = himesisBaseClass(name)
         hg[HC.METAMODELS] = set(m['metamodels'])
         hg[HC.MISSING_METAMODELS] = \
             lambda : hg[HC.METAMODELS] - self._loadedMMs
         hg[HC.GUID] = uuid.uuid4()
-        
+
         if himesisBaseClass == Himesis :
             self._loadedMMs = hg[HC.METAMODELS].copy()
-    
+
         if len(m['nodes']) == 0 :
             return hg
 
@@ -122,8 +122,8 @@ class ModelAndRuleCompiler :
 
         for e in m['edges'] :
             hg.add_edges(
-                    [(atompmIds2himesisIndices[str(e['src'])],
-                      atompmIds2himesisIndices[str(e['dest'])])])        
+                [(atompmIds2himesisIndices[str(e['src'])],
+                  atompmIds2himesisIndices[str(e['dest'])])])
 
         return hg
 
@@ -140,7 +140,7 @@ class ModelAndRuleCompiler :
         if fname in self._compiledRules :
             return self._compiledRules[fname]
         elif r == None :
-             r = utils.fread('/users/%s/%s'%(self._username,fname))
+            r = utils.fread('/users/%s/%s'%(self._username,fname))
 
 
         '''
@@ -174,7 +174,7 @@ class ModelAndRuleCompiler :
                 return res
 
             hgs = []
-            for p,pcm in p2pcm.iteritems() :
+            for p,pcm in p2pcm.items() :
                 mms = []
                 for mm in pcm['metamodels'] :
                     if re.search('.pattern$',mm):
@@ -182,7 +182,7 @@ class ModelAndRuleCompiler :
                     elif re.search('.ramified$',mm):
                         mms.append(mm[:-len('.ramified')])
                 pcm['metamodels'] = mms
-                    
+
                 for id in pcm['nodes'] :
                     n = pcm['nodes'][id]
                     matches = re.match('(.*)\.pattern/__p(.*)',n['$type']) or re.match('(.*)\.ramified/__p(.*)',n['$type'])
@@ -204,9 +204,9 @@ class ModelAndRuleCompiler :
                     del n['__pMatchSubtypes']
 
                 hg = self.compileModel(
-                                pcm,
-                                name=fname+'_'+patternType,
-                                himesisBaseClass=himesisBaseClass)
+                    pcm,
+                    name=fname+'_'+patternType,
+                    himesisBaseClass=himesisBaseClass)
 
                 if patternType == 'LHS' or patternType == 'NAC' :
                     wrapAttributeDesignerCode(hg,'attrCondition')
@@ -223,13 +223,13 @@ class ModelAndRuleCompiler :
                     def wrapImportedModelAttribute(val) :
                         return lambda arg1,arg2 : val
                     for v in hg.vs :
-                        for attr,val in v.attributes().iteritems() :
+                        for attr,val in v.attributes().items() :
                             if Himesis.is_RAM_attribute(attr) and val != None :
                                 v[attr] = wrapImportedModelAttribute(val)
                     hg[HC.MT_ACTION] = ''
-                    wrapPatternActionDesignerCode(hg)    
+                    wrapPatternActionDesignerCode(hg)
 
-                hgs.append(hg)            
+                hgs.append(hg)
 
             return hgs
 
@@ -253,7 +253,7 @@ class ModelAndRuleCompiler :
                 if re.search('/RHSImport$',r['nodes'][id]['$type']) :
                     pc[id] = utils.fread(
                         '/users/%s/%s'%(self._username,
-                                             r['nodes'][id]['filename']['value']))
+                                        r['nodes'][id]['filename']['value']))
 
                     for nid in pc[id]['nodes'] :
                         n = pc[id]['nodes'][nid]
@@ -262,9 +262,9 @@ class ModelAndRuleCompiler :
                         matches = re.match('(.*)/(.*)',n['$type'])
                         n['$type'] = matches.group(1)+'.pattern/__p'+matches.group(2)
                     pc[id]['metamodels'] = \
-                        map(lambda mm : mm+'.pattern', pc[id]['metamodels'])
-                    return pc        
-                
+                        [mm + '.pattern' for mm in pc[id]['metamodels']]
+                    return pc
+
 
         ''' 
             return a dict of the form {...,id:contents,...} where 'id' describes a
@@ -290,8 +290,7 @@ class ModelAndRuleCompiler :
                 return getImportedModelAsPatternContents()
 
             def outNeighbors(source) :
-                return map(lambda x: str(x['dest']),
-                              filter(lambda y : y['src'] == source, r['edges']))
+                return [str(x['dest']) for x in [y for y in r['edges'] if y['src'] == source]]
 
             def getConnectedNodes(container,contents) :
                 _contents = set()
@@ -301,10 +300,10 @@ class ModelAndRuleCompiler :
 
                 if len(_contents) == 0 :
                     return contents
-                
+
                 contents = contents | _contents
                 return set(utils.flatten(
-                            map(lambda x: getConnectedNodes(x,contents),_contents)))
+                    [getConnectedNodes(x, contents) for x in _contents]))
 
             pc = {}
             for id in r['nodes'] :
@@ -315,10 +314,7 @@ class ModelAndRuleCompiler :
                 return {}
 
             for p in pc :
-                pc[p] = filter(
-                    lambda x: \
-                        r['nodes'][x]['$type'] != TC.RULEMM+'/PatternContents', \
-                    getConnectedNodes(p,set()))
+                pc[p] = pc[p] = [x for x in getConnectedNodes(p,set()) if r['nodes'][x]['$type'] != TC.RULEMM+'/PatternContents']
 
                 m = {'nodes':{},'edges':[],'metamodels':[]}
                 mms = []
@@ -327,11 +323,10 @@ class ModelAndRuleCompiler :
                     mms.append( utils.getMetamodel(r['nodes'][id]['$type']) )
                 m['metamodels'] = list(set(mms))
                 m['edges'] = \
-                    filter(
-                        lambda e : e['src'] in m['nodes'], r['edges'])
+                    [e for e in r['edges'] if e['src'] in m['nodes']]
                 pc[p] = m
-                
-            return pc         
+
+            return pc
 
 
         ''' 
@@ -339,17 +334,14 @@ class ModelAndRuleCompiler :
             patternContentsModel} have empty or duplicate __pLabels... return    error
               if any '''
         def validateLabels(p2pcm) :
-            for p,pcm in p2pcm.iteritems() :
-                for id in pcm['nodes'] : 
+            for p,pcm in p2pcm.items() :
+                for id in pcm['nodes'] :
                     if '__pLabel' not in pcm['nodes'][id] :
                         return {'$err':'missing __pLabel attribute'}
                     l = pcm['nodes'][id]['__pLabel']['value']
                     if l == '' :
                         return {'$err':'empty __pLabel'}
-                    elif len(
-                            filter(
-                                lambda x: pcm['nodes'][x]['__pLabel']['value'] == l, 
-                                pcm['nodes']) ) > 1 :
+                    elif len([x for x in pcm['nodes'] if pcm['nodes'][x]['__pLabel']['value'] == l]) > 1:
                         return {'$err':'duplicate __pLabel :: '+l}
             return {}
 
@@ -388,16 +380,16 @@ class ModelAndRuleCompiler :
                             attr)
                         return self._dcal.eval(code)
                     except Exception as e :
-                     	if '$err' in ex :
-                        	raise RuntimeError(ex['$err'])
+                        if '$err' in ex :
+                            raise RuntimeError(ex['$err'])
                         else :
-                            raise RuntimeError(\
-                                        'unexpected error encountered while evaluating '+
-                                        type+' :: '+str(e))
+                            raise RuntimeError( \
+                                'unexpected error encountered while evaluating '+
+                                type+' :: '+str(e))
                 return evalAttrCode
 
             for v in hg.vs :
-                for attr,code in v.attributes().iteritems() :
+                for attr,code in v.attributes().items() :
                     if Himesis.is_RAM_attribute(attr) and code != None :
                         v[attr] = wrap(code,v[HC.MT_LABEL],attr)
 
@@ -427,9 +419,9 @@ class ModelAndRuleCompiler :
                         if '$err' in ex :
                             raise RuntimeError(ex['$err'])
                         else :
-                            raise RuntimeError(\
-                                        'unexpected error encountered while evaluating '+
-                                        'pattern action code :: '+str(e))
+                            raise RuntimeError( \
+                                'unexpected error encountered while evaluating '+
+                                'pattern action code :: '+str(e))
                 return evalPatternCode
 
             hg[HC.MT_ACTION] = wrap(hg[HC.MT_ACTION])
@@ -457,9 +449,9 @@ class ModelAndRuleCompiler :
                         if '$err' in ex :
                             raise RuntimeError(ex['$err'])
                         else :
-                            raise RuntimeError(\
-                                        'unexpected error encountered while evaluating '+
-                                        'pattern condition code :: '+str(e))
+                            raise RuntimeError( \
+                                'unexpected error encountered while evaluating '+
+                                'pattern condition code :: '+str(e))
                 return evalPatternCode
 
             hg[HC.MT_CONSTRAINT] = wrap(hg[HC.MT_CONSTRAINT])
@@ -475,7 +467,7 @@ class ModelAndRuleCompiler :
             raise ValueError(fname+' NAC compilation failed on :: '+nacs['$err'])
 
         rhs  = compilePattern('RHS',HimesisPostConditionPattern) or \
-                 compilePattern('RHSImport',HimesisPostConditionPattern)
+               compilePattern('RHSImport',HimesisPostConditionPattern)
         if rhs.__class__ == {}.__class__ :
             raise ValueError(fname+' RHS compilation failed on :: '+rhs['$err'])
 
@@ -483,11 +475,11 @@ class ModelAndRuleCompiler :
         for nac in nacs :
             nac.LHS = lhs[0]
             nac.bridge = nac.compute_bridge()
-            
+
         #lhs[0].NACs = nacs
-        
+
         lhs[0].addNACs(nacs)
-        
+
         ''' hergin :: motif-integration start '''
         ''' check condition for RHS for query rule '''
         if len(rhs)>0:
@@ -496,20 +488,20 @@ class ModelAndRuleCompiler :
                 rhs[0].pre_labels = lhs[0].vs[HC.MT_LABEL]
             else :
                 rhs[0].pre_labels = []
-        
+
             self._compiledRules[fname] = {'lhs':lhs[0],'rhs':rhs[0]}
         else:
             self._compiledRules[fname] = {'lhs':lhs[0]}
         ''' hergin :: motif-integration end '''
         return self._compiledRules[fname]
-    
+
 
 
     '''
         remember the types stored in the 'connectorTypes' property of the passed
           metamodel '''
     def _computeConnectorTypes(self,mm,mmData) :
-        for ct in mmData['connectorTypes'].keys() :
+        for ct in list(mmData['connectorTypes'].keys()) :
             self._connectorTypes.add(mm+'/'+ct)
 
 
@@ -549,30 +541,30 @@ class ModelAndRuleCompiler :
                         >> rule can now match entities from MyDSL '''
     def _computeSubtypes(self,mm,mmData) :
         t2pt     = mmData['types2parentTypes']
-        types    = t2pt.keys()
-        parents  = set(itertools.chain.from_iterable(t2pt.values()))
-        children = filter(lambda t: t2pt[t] != [], types)
+        types = list(t2pt.keys())
+        parents = set(itertools.chain.from_iterable(list(t2pt.values())))
+        children = [t for t in types if t2pt[t] != []]
         for type in types :
             fulltype = mm+'/'+type
             if fulltype not in self._subtypes :
                 self._subtypes[fulltype] = []
             if type in parents :
                 for c in children :
-                    if type in t2pt[c] : 
+                    if type in t2pt[c] :
                         self._subtypes[fulltype].append(mm+'/'+c)
             if self.RC__looseSubtypingMM and \
-                self.RC__looseSubtypingMM+'/'+type in self._subtypes :
+                    self.RC__looseSubtypingMM+'/'+type in self._subtypes :
                 self._subtypes[fulltype].append(self.RC__looseSubtypingMM+'/'+type)
                 self._subtypes[fulltype].extend(
                     self._subtypes[self.RC__looseSubtypingMM+'/'+type])
 
-    
+
     '''
         forget all compiled rules '''
     def forgetCompiledRules(self) :
         self._compiledRules = {}
 
-    
+
 
     '''
           return a reference to self._mmTypeData '''
@@ -613,7 +605,7 @@ class ModelAndRuleCompiler :
         if loadMM :
             self._loadedMMs.add(mm)
 
-                
+
 
     '''
         remove a metamodel from the list of currently loaded (on the asworker) 

+ 60 - 60
mt/ptcal/dapi.py

@@ -3,8 +3,8 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 import copy, subprocess, traceback, os
-from pytcore.core.himesis import HConstants as HC
-from utils import Utilities as utils
+from .pytcore.core.himesis import HConstants as HC
+from .utils import Utilities as utils
 
 '''
     implements the DesignerAPI functions used in pattern/attribute constraints 
@@ -22,18 +22,18 @@ from utils import Utilities as utils
                 . 'session_*()' functions provide a clean and efficient way to 
                   remember information across rules
                 . the 'isConnectionType()' function '''
-class DesignerAPI :    
+class DesignerAPI :
     def __init__(self,username,aswid,mtwid) :
         self._username = username
         self._aswid        = aswid
         self._mtwid        = mtwid
-        
+
     def _aswPrintReq(self,msg):
-        utils.httpReq(    'PUT', 
-                        '127.0.0.1:8124', 
-                        '/GET/console?wid='+self._aswid, 
-                        {'text':msg})
-    
+        utils.httpReq(    'PUT',
+                          '127.0.0.1:8124',
+                          '/GET/console?wid='+self._aswid,
+                          {'text':msg})
+
     def _printToDevCon(self,msg):
         self._aswPrintReq(msg)
 
@@ -71,12 +71,12 @@ class DesignerAPI :
         self._attr       = attr
         self._journal     = journal
 
-        matched = self._pl2gi.values()
+        matched = list(self._pl2gi.values())
         for v in self._graph.vs :
             if v.index not in matched :
                 self._pl2gi['$atompmId:'+str(v['$atompmId'])] = v.index
-            
-            
+
+
     '''
         wrapper around raise()... to be properly reported to the client, 
         exceptions can not simply be raised... this would just cause them to be
@@ -94,20 +94,20 @@ class DesignerAPI :
     def _getAttr(self,attr=None,pLabel=None) :
         if pLabel == None :
             if self._type.startswith('pattern') :
-                self.__raise(\
+                self.__raise( \
                     'getAttr() requires a __pLabel in pattern conditions/actions')
             pLabel = self._pLabel
         elif not self._type.startswith('pattern') :
-            self.__raise(\
+            self.__raise( \
                 'getAttr() only accepts a __pLabel in pattern conditions/actions')
         elif pLabel not in self._pl2gi :
-            self.__raise(\
-                'invalid getAttr() __pLabel :: '+str(pLabel)+' (either no node '+\
+            self.__raise( \
+                'invalid getAttr() __pLabel :: '+str(pLabel)+' (either no node '+ \
                 'with that __pLabel exists, or none is matched yet)')
         if attr == None :
             if not self._type.startswith('attr') :
-                self.__raise(\
-                    'getAttr() can only be called without parameters'+\
+                self.__raise( \
+                    'getAttr() can only be called without parameters'+ \
                     'in attribute conditions/actions')
             attr = self._attr
 
@@ -115,20 +115,20 @@ class DesignerAPI :
         if attr not in n.attribute_names() :
             self.__raise('invalid getAttr() attribute :: '+str(attr))
         return copy.deepcopy(n[attr])
-    
-    
+
+
     def _getAttrNames(self,pLabel=None):
         if pLabel == None :
             if self._type.startswith('pattern') :
-                self.__raise(\
+                self.__raise( \
                     'getAttrNames() requires a __pLabel in pattern conditions/actions')
-            pLabel = self._pLabel        
+            pLabel = self._pLabel
         elif not self._type.startswith('pattern') :
-            self.__raise(\
+            self.__raise( \
                 'getAttrNames() only accepts a __pLabel in pattern conditions/actions')
         elif pLabel not in self._pl2gi :
-            self.__raise(\
-                'invalid getAttr() __pLabel :: '+str(pLabel)+' (either no node '+\
+            self.__raise( \
+                'invalid getAttr() __pLabel :: '+str(pLabel)+' (either no node '+ \
                 'with that __pLabel exists, or none is matched yet)')
 
         n = self._graph.vs[self._pl2gi[pLabel]]
@@ -138,18 +138,18 @@ class DesignerAPI :
     def _hasAttr(self,attr=None,pLabel=None) :
         if pLabel == None :
             if self._type.startswith('pattern') :
-                self.__raise(\
+                self.__raise( \
                     'hasAttr() requires a __pLabel in pattern conditions/actions')
             pLabel = self._pLabel
         elif not self._type.startswith('pattern') :
-            self.__raise(\
+            self.__raise( \
                 'hasAttr() only accepts a __pLabel in pattern conditions/actions')
         elif pLabel not in self._pl2gi :
-            self.__raise(\
-                'invalid hasAttr() __pLabel :: '+str(pLabel)+' (either no node '+\
+            self.__raise( \
+                'invalid hasAttr() __pLabel :: '+str(pLabel)+' (either no node '+ \
                 'with that __pLabel exists, or none is matched yet)')
         if attr == None :
-            self.__raise(\
+            self.__raise( \
                 'hasAttr() can not be called without an attribute parameter')
 
         n = self._graph.vs[self._pl2gi[pLabel]]
@@ -163,8 +163,8 @@ class DesignerAPI :
         elif pLabel == None :
             self.__raise('setAttr() requires a valid __pLabel')
         elif pLabel not in self._pl2gi :
-            self.__raise(\
-                'invalid setAttr() __pLabel :: '+str(pLabel)+' (either no node '+\
+            self.__raise( \
+                'invalid setAttr() __pLabel :: '+str(pLabel)+' (either no node '+ \
                 'with that __pLabel exists, or none is matched yet)')
 
         n = self._graph.vs[self._pl2gi[pLabel]]
@@ -174,18 +174,18 @@ class DesignerAPI :
         if oldVal != val :
             n[attr] = val
             self._journal.append(
-                                {'op':'CHATTR',
-                                   'guid':n[HC.GUID],
-                                 'attr':attr,
-                                 'old_val':oldVal,
-                                 'new_val':val})
+                {'op':'CHATTR',
+                 'guid':n[HC.GUID],
+                 'attr':attr,
+                 'old_val':oldVal,
+                 'new_val':val})
             n[HC.MT_DIRTY] = True
 
 
 
     def _getAllNodes(self,fulltypes=None) :
         if not self._type.startswith('pattern') :
-            self.__raise(\
+            self.__raise( \
                 'getAllNodes() can only be used in pattern conditions/actions')
         elif fulltypes != None and fulltypes.__class__ != [].__class__ :
             self.__raise('invalid getAllNodes() fulltypes array :: '+fulltypes)
@@ -203,27 +203,27 @@ class DesignerAPI :
 
     def _getNeighbors(self,dir,type,pLabel) :
         if not self._type.startswith('pattern') :
-            self.__raise(\
+            self.__raise( \
                 'getNeighbors() can only be used in pattern conditions/actions')
         elif pLabel == None :
             self.__raise('getNeighbors() requires a valid __pLabel')
         elif pLabel not in self._pl2gi :
-            self.__raise(\
-                'invalid getNeighbors() __pLabel :: '+str(pLabel)+' (no node '+\
+            self.__raise( \
+                'invalid getNeighbors() __pLabel :: '+str(pLabel)+' (no node '+ \
                 'with that __pLabel exists)')
 
         pLabels = set()
         if len(self._graph.es) > 0 :
-            gi2pl = dict((v, k) for (k, v) in self._pl2gi.items())
+            gi2pl = dict((v, k) for (k, v) in list(self._pl2gi.items()))
             idx   = self._pl2gi[pLabel]
             for e in self._graph.get_edgelist() :
                 if e[0] == idx and \
-                    (dir == '>' or dir == '*' or dir == "out") and \
-                    (type == '*' or self._graph.vs[e[1]][HC.FULLTYPE] == type) :
+                        (dir == '>' or dir == '*' or dir == "out") and \
+                        (type == '*' or self._graph.vs[e[1]][HC.FULLTYPE] == type) :
                     pLabels.add(gi2pl[e[1]])
                 elif e[1] == idx and \
-                      (dir == '<' or dir == '*' or dir == "in") and \
-                      (type == '*' or self._graph.vs[e[0]][HC.FULLTYPE] == type) :
+                        (dir == '<' or dir == '*' or dir == "in") and \
+                        (type == '*' or self._graph.vs[e[0]][HC.FULLTYPE] == type) :
                     pLabels.add(gi2pl[e[0]])
         return list(pLabels)
 
@@ -231,13 +231,13 @@ class DesignerAPI :
 
     def _isConnectionType(self,pLabel) :
         if not self._type.startswith('pattern') :
-            self.__raise(\
+            self.__raise( \
                 'isConnectionType() can only be used in pattern conditions/actions')
         elif pLabel == None :
             self.__raise('isConnectionType() requires a valid __pLabel')
         elif pLabel not in self._pl2gi :
-            self.__raise(\
-                'invalid isConnectionType() __pLabel :: '+str(pLabel)+' (no node '+\
+            self.__raise( \
+                'invalid isConnectionType() __pLabel :: '+str(pLabel)+' (no node '+ \
                 'with that __pLabel exists)')
 
         return self._graph.vs[self._pl2gi[pLabel]][HC.CONNECTOR_TYPE]
@@ -253,7 +253,7 @@ class DesignerAPI :
 
     def _session_put(self,key,val) :
         if not self._type.endswith('Action') :
-            self.__raise(\
+            self.__raise( \
                 'session_put() can only be used in attribute and pattern actions')
 
         self._graph.session[key] = val
@@ -262,24 +262,24 @@ class DesignerAPI :
 
     def _pauseTransformation(self):
         self._httpReq("PUT", '127.0.0.1:8125', '/execmode?wid='+self._mtwid, {'mode':'pause'})
-        
+
     def _stopTransformation(self):
         self._httpReq("PUT", '127.0.0.1:8125', '/execmode?wid='+self._mtwid, {'mode':'stop'})
-        
+
     def _resumeTransformation(self):
         self._httpReq("PUT", '127.0.0.1:8125', '/execmode?wid='+self._mtwid, {'mode':'play'})
 
     def _httpReq(self,method,host,uri,data) :
         if host == None :
             return utils.httpReq(
-                        method,
-                        '127.0.0.1:8124',
-                        uri+'?wid='+self._aswid,
-                        data)
-        else : 
+                method,
+                '127.0.0.1:8124',
+                uri+'?wid='+self._aswid,
+                data)
+        else :
             return utils.httpReq(method,host,uri,data)
 
-        
+
 
     def _print(self,str) :
         print(str)
@@ -289,7 +289,7 @@ class DesignerAPI :
     def _sys_call(self,args) :
         try :
             return subprocess.call(args)
-        except OSError as ex : 
+        except OSError as ex :
             self.__raise('system call crashed on :: '+ex.strerror)
 
 
@@ -297,7 +297,7 @@ class DesignerAPI :
     def _sys_mkdir(self,path) :
         try :
             return os.makedirs('./users/'+self._username+'/'+path)
-        except OSError as ex : 
+        except OSError as ex :
             if ex.errno != 17 :
                 #ignore directory already exists error
                 self.__raise('directory creation failed :: '+ex.strerror)

+ 10 - 7
mt/ptcal/dcal.py

@@ -3,11 +3,11 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 import sys
-from tconstants import TConstants as TC
-from dapi import DesignerAPI
+from .tconstants import TConstants as TC
+from .dapi import DesignerAPI
 
 try :
-	import spidermonkey 
+	import spidermonkey
 except ImportError as ex :
 	pass
 
@@ -36,7 +36,7 @@ class DesignerCodeAbstractionLayer :
 			elif lang == TC.JAVASCRIPT and 'spidermonkey' in sys.modules :
 				self._execContexts[lang] = JavaScriptExecutionContext(self._dAPI)
 			else :
-				assert False, 'unsupported designer code language :: '+str(lang)	
+				assert False, 'unsupported designer code language :: '+str(lang)
 		self._execContext = self._execContexts[lang]
 
 		self._dAPI.configure(graph,type,pl2gi,ex,pLabel,attr,journal)
@@ -72,7 +72,7 @@ class JavaScriptExecutionContext :
 	def __init__(self,dAPI) :
 		self._context = spidermonkey.Runtime().new_context()
 		self._context.bind_callable("getAttr",dAPI._getAttr)
-		self._context.bind_callable("hasAttr",dAPI._hasAttr)		
+		self._context.bind_callable("hasAttr",dAPI._hasAttr)
 		self._context.bind_callable("setAttr",dAPI._setAttr)
 		self._context.bind_callable("getAllNodes",dAPI._getAllNodes)
 		self._context.bind_callable("getNeighbors",dAPI._getNeighbors)
@@ -110,7 +110,7 @@ class PythonExecutionContext :
 			{'getAttr' 				: dAPI._getAttr,
 			 'hasAttr'				: dAPI._hasAttr,
 			 'getAttrNames'		    : dAPI._getAttrNames,
-		 	 'setAttr' 				: dAPI._setAttr,
+			 'setAttr' 				: dAPI._setAttr,
 			 'getAllNodes' 			: dAPI._getAllNodes,
 			 'getNodesFromLabels'	: dAPI._getNodesFromLabels,
 			 'getNeighbors' 		: dAPI._getNeighbors,
@@ -136,7 +136,10 @@ class PythonExecutionContext :
 		if 'result' in self._context :
 			del self._context['result']
 
-		exec(code) in self._context
+		if sys.version_info[0] < 3:
+			exec(code) in self._context
+		else:
+			exec((code), self._context)
 
 		if 'result' not in self._context :
 			return None

+ 112 - 112
mt/ptcal/motifcontext.py

@@ -13,25 +13,25 @@ import collections
 import datetime
 from time import *
 import timeit
-from tconstants import TConstants as TC
-from utils import Utilities as utils
-from tcontext import TransformationContext
-from pytcore.tcore.messages import Pivots
-from pytcore.rules.arule import ARule
-from pytcore.rules.query import Query
-from pytcore.rules.query import CQuery2
-from pytcore.rules.query import CQuery3
-from pytcore.rules.srule import SRule
-from pytcore.rules.crule import CRule
-from pytcore.rules.frule import FRule
-from pytcore.rules.brule import BRule
-from pytcore.rules.bsrule import BSRule
-from pytcore.rules.lrule import LRule
-from pytcore.rules.lfrule import LFRule
-from pytcore.rules.lsrule import LSRule
-from pytcore.rules.lqsrule import LQSRule
-from pytcore.rules.sequence import Sequence
-			
+from .tconstants import TConstants as TC
+from .utils import Utilities as utils
+from .tcontext import TransformationContext
+from .pytcore.tcore.messages import Pivots
+from .pytcore.rules.arule import ARule
+from .pytcore.rules.query import Query
+from .pytcore.rules.query import CQuery2
+from .pytcore.rules.query import CQuery3
+from .pytcore.rules.srule import SRule
+from .pytcore.rules.crule import CRule
+from .pytcore.rules.frule import FRule
+from .pytcore.rules.brule import BRule
+from .pytcore.rules.bsrule import BSRule
+from .pytcore.rules.lrule import LRule
+from .pytcore.rules.lfrule import LFRule
+from .pytcore.rules.lsrule import LSRule
+from .pytcore.rules.lqsrule import LQSRule
+from .pytcore.rules.sequence import Sequence
+
 '''
 	holds the execution context of a 'Transformation' construct 
 	t						
@@ -45,70 +45,70 @@ class MotifContext(TransformationContext) :
 		self.totalExecutionTime=0
 		self.sendAndApplyDeltaFunc = ptcal.sendAndApplyDelta
 		self.nextInput = "packetIn"
-		
+
 		self.compiler = ptcal._compiler
 		self.ptcal = ptcal
 		self.rules = {}
-		
+
 		self.startStateID = None
-		
+
 		for id in self.t['nodes']:
-			
+
 			rule = self.ruleIdentifier(self.t['nodes'], id)
-			
+
 			if rule == None:
-				
-				if self.t['nodes'][id]['$type'] == self.metamodel+"/LRule" or\
-						self.t['nodes'][id]['$type'] == self.metamodel+"/LSRule" or\
-						self.t['nodes'][id]['$type'] == self.metamodel+"/LFRule" or\
+
+				if self.t['nodes'][id]['$type'] == self.metamodel+"/LRule" or \
+						self.t['nodes'][id]['$type'] == self.metamodel+"/LSRule" or \
+						self.t['nodes'][id]['$type'] == self.metamodel+"/LFRule" or \
 						self.t['nodes'][id]['$type'] == self.metamodel+"/LQSRule":
-					
+
 					maxIterations = int(self.t['nodes'][id]['maxIterations']['value'])
 					nested = int(self.t['nodes'][id]['nested']['value'])
 					outerFirst = True if nested==0 else False
-					
-					def f(e) : 
+
+					def f(e) :
 						return e['src'] == id
-					lruleEdges = filter(f,self.t['edges'])
-					
+					lruleEdges = list(filter(f,self.t['edges']))
+
 					baseEdgeId=None
 					loopEdgeId=None
-				
+
 					for edge in lruleEdges:
 						if self.t['nodes'][edge['dest']]['$type']==self.metamodel+"/base":
 							baseEdgeId=edge['dest']
 						elif self.t['nodes'][edge['dest']]['$type']==self.metamodel+"/loop":
 							loopEdgeId=edge['dest']
-							
-					def f(e) : 
+
+					def f(e) :
 						return e['src'] == baseEdgeId
-					baseRuleIds = filter(f,self.t['edges'])
+					baseRuleIds = list(filter(f,self.t['edges']))
 					baseRuleID=baseRuleIds[0]['dest']
-					
+
 					compiledBaseRule = None
-					
-					if self.t['nodes'][id]['$type'] == self.metamodel+"/LRule" or\
+
+					if self.t['nodes'][id]['$type'] == self.metamodel+"/LRule" or \
 							self.t['nodes'][id]['$type'] == self.metamodel+"/LQSRule":
 						baseRuleName = self.t['nodes'][baseRuleID]['query']['value']
 						compiledBaseRule = self.compiler.compileRule(None,baseRuleName)
 					else:
 						baseRuleName = self.t['nodes'][baseRuleID]['rule']['value']
 						compiledBaseRule = self.compiler.compileRule(None,baseRuleName)
-						#baseRule = ARule(compiledBaseRule['lhs'],compiledBaseRule['rhs'],self.sendAndApplyDeltaFunc)
-					
-					def f(e) : 
+					#baseRule = ARule(compiledBaseRule['lhs'],compiledBaseRule['rhs'],self.sendAndApplyDeltaFunc)
+
+					def f(e) :
 						return e['src'] == loopEdgeId
-					loopRuleIds = filter(f,self.t['edges'])
+					loopRuleIds = list(filter(f,self.t['edges']))
 					loopRuleID=loopRuleIds[0]['dest']
-					
+
 					loopRuleType = self.t['nodes'][loopRuleID]['$type']
-					
+
 					if loopRuleType != self.metamodel+"/CRule":
 						loopRuleName = self.t['nodes'][loopRuleID]['rule']['value']
 						compiledLoopRule = self.compiler.compileRule(None,loopRuleName)
-					
+
 					loopRule = None
-					
+
 					if loopRuleType == self.metamodel+"/ARule":
 						loopRule = ARule(compiledLoopRule['lhs'],compiledLoopRule['rhs'],self.sendAndApplyDeltaFunc)
 					elif loopRuleType == self.metamodel+"/FRule":
@@ -120,54 +120,54 @@ class MotifContext(TransformationContext) :
 						self.ptcal._transfData[ruleName] = utils.fread('/users/%s/%s'%(self.ptcal.username,ruleName))
 						motifContext = MotifContext(ruleName,self.ptcal)
 						loopRule = CRule(motifContext)
-	
+
 					if self.t['nodes'][id]['$type'] == self.metamodel+"/LRule":
 						rule = LRule(compiledBaseRule['lhs'],loopRule,max_iterations=maxIterations)
-	
+
 					elif self.t['nodes'][id]['$type'] == self.metamodel+"/LQSRule":
 						rule = LQSRule(compiledBaseRule['lhs'],loopRule,max_iterations=maxIterations)
-	
+
 					elif self.t['nodes'][id]['$type'] == self.metamodel+"/LSRule":
 						rule = LSRule(compiledBaseRule['lhs'],compiledBaseRule['rhs'],loopRule,outer_first=outerFirst,sendAndApplyDeltaFunc=self.sendAndApplyDeltaFunc,max_iterations=maxIterations)
-	
+
 					elif self.t['nodes'][id]['$type'] == self.metamodel+"/LFRule":
 						rule = LFRule(compiledBaseRule['lhs'],compiledBaseRule['rhs'],loopRule,outer_first=outerFirst,sendAndApplyDeltaFunc=self.sendAndApplyDeltaFunc,max_iterations=maxIterations)
-						
-				elif self.t['nodes'][id]['$type'] == self.metamodel+"/BRule" or\
+
+				elif self.t['nodes'][id]['$type'] == self.metamodel+"/BRule" or \
 						self.t['nodes'][id]['$type'] == self.metamodel+"/BSRule":
-					
-					def f(e) : 
+
+					def f(e) :
 						return e['src'] == id
-					bruleEdges = filter(f,self.t['edges'])
-					
+					bruleEdges = list(filter(f,self.t['edges']))
+
 					branchRuleList=[]
-					
+
 					for edge in bruleEdges:
 						if self.t['nodes'][edge['dest']]['$type']==self.metamodel+"/branch":
 							branchID=edge['dest']
-							def f(e) : 
+							def f(e) :
 								return e['src'] == branchID
-							branchRuleID=filter(f,self.t['edges'])[0]['dest']
-							
+							branchRuleID=list(filter(f,self.t['edges']))[0]['dest']
+
 							rule = self.ruleIdentifier(self.t['nodes'],branchRuleID)
-							
+
 							if rule == None and self.t['nodes'][branchRuleID]['$type']==self.metamodel+"/CRule":
 								ruleName = self.t['nodes'][branchRuleID]['ref']['value']
 								self.ptcal._transfData[ruleName] = utils.fread('/users/%s/%s'%(self.ptcal.username,ruleName))
 								motifContext = MotifContext(ruleName,self.ptcal)
 								rule = CRule(motifContext)
-								
+
 							branchRuleList.append(rule)
-					
+
 					if self.t['nodes'][id]['$type'] == self.metamodel+"/BRule":
 						rule = BRule(branchRuleList)
 					elif self.t['nodes'][id]['$type'] == self.metamodel+"/BSRule":
 						maxIterations = int(self.t['nodes'][id]['maxIterations']['value'])
 						rule = BSRule(branchRuleList,maxIterations)
-			
+
 				elif self.t['nodes'][id]['$type'] == self.metamodel+"/CRule":
 					rule = self.t['nodes'][id]['ref']['value']
-				
+
 				elif self.t['nodes'][id]['$type'] == self.metamodel+"/Sequence":
 					sequenceRuleList=[]
 					rulesFile = self.t['nodes'][id]['ref']['value']
@@ -180,43 +180,43 @@ class MotifContext(TransformationContext) :
 						else: # TODO decide for CRule
 							pass
 					rule = Sequence(sequenceRuleList)
-			
+
 				elif self.t['nodes'][id]['$type'] == self.metamodel+"/Start":
 					self.startStateID = id
 					rule = None
-			
+
 			if rule != None:
 				self.rules[id] = {'id':id,
-							'name':self.t['nodes'][id]['name']['value'],
-							'alias':self.t['nodes'][id]['alias']['value'],
-							'rule':rule}
-	
+								  'name':self.t['nodes'][id]['name']['value'],
+								  'alias':self.t['nodes'][id]['alias']['value'],
+								  'rule':rule}
+
 	def ruleIdentifier(self,ruleList,ruleId):
-		
+
 		rule = None
-		
+
 		if ruleList[ruleId]['$type']==self.metamodel+"/ARule":
 			ruleName = ruleList[ruleId]['rule']['value']
 			compiledRule = self.compiler.compileRule(None,ruleName)
 			rule = ARule(compiledRule['lhs'],compiledRule['rhs'],self.sendAndApplyDeltaFunc)
-			
+
 		elif ruleList[ruleId]['$type']==self.metamodel+"/QRule":
 			ruleName = ruleList[ruleId]['query']['value']
 			compiledRule = self.compiler.compileRule(None,ruleName)
 			rule = Query(compiledRule['lhs'])
-		
+
 		elif ruleList[ruleId]['$type']==self.metamodel+"/FRule":
 			maxIterations = ruleList[ruleId]['maxIterations']['value']
 			ruleName = ruleList[ruleId]['rule']['value']
 			compiledRule = self.compiler.compileRule(None,ruleName)
 			rule = FRule(compiledRule['lhs'],compiledRule['rhs'],int(maxIterations),self.sendAndApplyDeltaFunc)
-		
+
 		elif ruleList[ruleId]['$type']==self.metamodel+"/SRule":
 			maxIterations = ruleList[ruleId]['maxIterations']['value']
 			ruleName = ruleList[ruleId]['rule']['value']
 			compiledRule = self.compiler.compileRule(None,ruleName)
 			rule = SRule(compiledRule['lhs'],compiledRule['rhs'],int(maxIterations),self.sendAndApplyDeltaFunc)
-		
+
 		elif ruleList[ruleId]['$type'] == self.metamodel+"/CQRule2":
 			ruleName = ruleList[ruleId]['query']['value']
 			innerRuleName = ruleList[ruleId]['innerQuery']['value']
@@ -224,7 +224,7 @@ class MotifContext(TransformationContext) :
 			compiledInnerRule = self.compiler.compileRule(None,innerRuleName)
 			innerQuery = Query(compiledInnerRule['lhs'])
 			rule = CQuery2(compiledRule['lhs'],innerQuery)
-		
+
 		elif ruleList[ruleId]['$type'] == self.metamodel+"/CQRule3":
 			ruleName = ruleList[ruleId]['query']['value']
 			innerRuleName = ruleList[ruleId]['innerQuery']['value']
@@ -235,10 +235,10 @@ class MotifContext(TransformationContext) :
 			innerQuery = Query(compiledInnerRule['lhs'])
 			secondInnerQuery = Query(compiledSecondInnerRule['lhs'])
 			rule = CQuery3(compiledRule['lhs'],innerQuery,secondInnerQuery)
-		
+
 		return rule
-	
-		
+
+
 	def setLastStepExecTime(self,a):
 		self._lastStep['time'] = a
 		self.totalExecutionTime += a
@@ -248,7 +248,7 @@ class MotifContext(TransformationContext) :
 	def getCurrentStepId(self) :
 		if self._lastStep == {} :
 			assert False, \
-				 "this function shouldn't be called when there is no current step"
+				"this function shouldn't be called when there is no current step"
 		else :
 			return self._lastStep['id']
 
@@ -268,34 +268,34 @@ class MotifContext(TransformationContext) :
 		
 	'''
 	def _getInitialStep(self) :
-		
+
 		if self.startStateID==None:
 			raise RuntimeError('There is no start state in loaded MoTif instance!')
-		
-		def f(e) : 
+
+		def f(e) :
 			return e['src'] == self.startStateID
-		startStateEdges = filter(f,self.t['edges'])
-		
+		startStateEdges = list(filter(f,self.t['edges']))
+
 		initialStepID=None
 		if len(startStateEdges) == 0 :
 			raise RuntimeError('Start state is not connected to any other state!')
 		else:
 			firstLinkID=startStateEdges[0]['dest']
-			def f(e) : 
+			def f(e) :
 				return e['src'] == firstLinkID
-			startStateEdges = filter(f,self.t['edges'])
+			startStateEdges = list(filter(f,self.t['edges']))
 			initialStepID=startStateEdges[0]['dest']
-			
+
 		if initialStepID in self.rules:
 			return self.rules[initialStepID]
 		else:
 			if self.t['nodes'][initialStepID]['$type']==self.metamodel+"/EndSuccess":
-						return {'trafoResult':TC.SUCCEEDED,
-										'feedbackReceived':'True'}
+				return {'trafoResult':TC.SUCCEEDED,
+						'feedbackReceived':'True'}
 			elif self.t['nodes'][initialStepID]['$type']==self.metamodel+"/EndFail":
-						return {'trafoResult':TC.FAILED,
-										'feedbackReceived':'True'}
-				
+				return {'trafoResult':TC.FAILED,
+						'feedbackReceived':'True'}
+
 
 
 	'''
@@ -339,16 +339,16 @@ class MotifContext(TransformationContext) :
 		else :
 			def f(e) :
 				return e['src'] == self._lastStep['id']
-			
-			edgesFromLastStep = filter(f,self.t['edges'])
-			
+
+			edgesFromLastStep = list(filter(f,self.t['edges']))
+
 			if len(edgesFromLastStep) == 0 :
 				ai = self._applicationInfo()
 				self._lastStep = {}
 				self._expired = True
 				return ai
 			else :
-				
+
 				targetLinkID=None
 				resString = None
 				if self._lastStep['applicationInfo'] == TC.SUCCEEDED :
@@ -360,27 +360,27 @@ class MotifContext(TransformationContext) :
 					if self.t['nodes'][edgeLS['dest']]['$type'] == resString:
 						targetLinkID=edgeLS['dest']
 						break
-				
-				def f(e) : 
+
+				def f(e) :
 					return e['src'] == targetLinkID
-				nodesAfterLastStep = filter(f,self.t['edges'])
-				
+				nodesAfterLastStep = list(filter(f,self.t['edges']))
+
 				nextStepID = nodesAfterLastStep[0]['dest']
-				
+
 				if nextStepID in self.rules:
 					self._lastStep = self.rules[nextStepID]
 				else:
 					if self.t['nodes'][nextStepID]['$type']==self.metamodel+"/EndSuccess":
 						self._lastStep = {'trafoResult':TC.SUCCEEDED,
-										'feedbackReceived':'True'}
+										  'feedbackReceived':'True'}
 					elif self.t['nodes'][nextStepID]['$type']==self.metamodel+"/EndFail":
 						self._lastStep = {'trafoResult':TC.FAILED,
-										'feedbackReceived':'True'}
-					
+										  'feedbackReceived':'True'}
+
 				#print clock()-timeNextStep
-				
+
 				return self._lastStep
-		
+
 
 	'''
 		set the application information of the last step '''
@@ -392,4 +392,4 @@ class MotifContext(TransformationContext) :
 
 	def isLastStepFeedbackReceived(self) :
 		return (not self._expired and self._lastStep=={}) or \
-				 'feedbackReceived' in self._lastStep
+			   'feedbackReceived' in self._lastStep

+ 77 - 69
mt/ptcal/petrinet.py

@@ -6,14 +6,23 @@ See COPYING.lesser and README.md in the root of this project for full details'''
 import igraph as ig
 import datetime
 #import pydot
+
 from threading import *
-from Queue import *
-from barrier import *
 from random import choice
-from sets import *
+from .barrier import *
+
+import sys
+
+if sys.version_info[0] < 3:
+  from Queue import *
+  from sets import *
+else:
+  from queue import *
+  Set = set
+
 
 class  PnModule(Thread):
-  
+
   def __init__(self, pnet, que, bar, full=False):
     Thread.__init__(self)
     '''Our graph'''
@@ -33,10 +41,10 @@ class  PnModule(Thread):
     self.full = full
     self.result={}
     self.xxx = 0
-    
+
   def repstr(self,string, length):
     return (string * length)[0:length]
-    
+
   def printMatrix(self,M):
     lenr = len(M[0])
     lenc = len(M)
@@ -57,18 +65,18 @@ class  PnModule(Thread):
       text+='\n'
       j=0
       i+=1
-    print text
-      
+    print(text)
+
   def summary(self):
     ig.summary(self.reachability)
-  
+
   def getKey(self):
-    return self.key  
-    
+    return self.key
+
   def next(self):
     self.ssc += 1
     return self.ssc
-  
+
   def DminPlusMatrix(self):
     i = 0
     j = 0
@@ -91,67 +99,67 @@ class  PnModule(Thread):
           fromweight = int(self.pngraph.es[self.pngraph.get_eid(t.index,p.index)]['weight'])
           self.dplus[i][j] = fromweight
         #print t['name']
-#        if t.index in totrans:
-#          self.dminus[i][j] = 1
-#        elif t.index in fromtrans:
-#          self.dplus[i][j] = 1
-#        else:
-#          pass#row.append(0)
+        #        if t.index in totrans:
+        #          self.dminus[i][j] = 1
+        #        elif t.index in fromtrans:
+        #          self.dplus[i][j] = 1
+        #        else:
+        #          pass#row.append(0)
         self.mx_to_gr_indexTn[t['name']] = j
         self.mx_to_gr_indexT[j] = t.index
         j+=1
       i+=1
       j = 0
-#    self.printMatrix(self.dminus)
-#    self.printMatrix(self.incidence)
-#    self.printMatrix(self.dplus)
-  
+  #    self.printMatrix(self.dminus)
+  #    self.printMatrix(self.incidence)
+  #    self.printMatrix(self.dplus)
+
   #get Strongly connected components, used in modular analysis
   def getSCC(self,M):
     vindex = statePresent(M)
     if not vindex == -1:
       return self.reachability.vs[vindex]['SCC']
-    else: 
+    else:
       return -1
-    
+
   def getSCCvid(self,id):
     P = self.reachability.vs[int(id)]
     return self.reachability.vs[int(id)]['SCC']
 
-  
+
   def statePresent(self, M):
     for v in self.reachability.vs:
       if all(v['M'] == M):
         return v.index
     return -1
-  
+
   #get all enabled transitions for state exploration
   def enabledT(self,M):
     enabled=[]
     for j in range(self.numT):
-      good = True 
-      
+      good = True
+
       tcol = self.dminus[0:self.numP,j] #Ti column over P as rows
       for i in range(self.numP):
         if tcol[i] > 0:
           t = self.mx_to_gr_indexT[j]
           p = self.mx_to_gr_indexP[i]
           weight = int(self.pngraph.es[self.pngraph.get_eid(p,t)]['weight'])
-          if not M[i] >= weight : 
+          if not M[i] >= weight :
             good = False
             break
       if good:
-       # print 'Enabled trans %s'%self.pngraph.vs[self.mx_to_gr_indexT[j]]['name']
+        # print 'Enabled trans %s'%self.pngraph.vs[self.mx_to_gr_indexT[j]]['name']
         enabled.append(j)
     return enabled
-  
+
   #produce new marking
   def fire(self,j,M,fusion=False):
     if fusion:
       empty = []
       empty.append(j)
       return empty
-    else: 
+    else:
       i = 0
       marking = ''
       for value in M:
@@ -169,7 +177,7 @@ class  PnModule(Thread):
         i+=1
       #print 'New marking %s'%marking
       return Mnew
-  
+
   #create reachability graph
   def reachabilityG(self):
     work=[]
@@ -196,12 +204,12 @@ class  PnModule(Thread):
           self.reachability.add_edges([(fromID,idFound)])
           self.reachability.es[self.reachability.get_eid(fromID, idFound)]['T'] = self.pngraph.vs[self.mx_to_gr_indexT[i]]['name']
     self.barrier.wait() #several modules can run in parallel (comes from modular analysis) wait for all to finish.
-  
+
   #mark strong components of a graph
   def SC(self):
     if not 'SCC' in self.reachability.vs.attribute_names():
-          self.reachability.vs[0]['SCC'] = self.next()
-    
+      self.reachability.vs[0]['SCC'] = self.next()
+
     components = self.reachability.clusters(mode=ig.STRONG)
     for i in range(len(components)):
       ssc = self.next()
@@ -211,11 +219,11 @@ class  PnModule(Thread):
         if self.reachability.vs[s]['SCC']>=0:
           pass
         else:
-          self.reachability.vs[s]['SCC'] = ssc 
+          self.reachability.vs[s]['SCC'] = ssc
           changed = True
       if not changed:
         self.ssc -= 1
-  
+
   #Transition fusion sets, modular analysys
   def addToEnabledTF(self,TF,M):
     if not TF in self.TFtoRun:
@@ -223,16 +231,16 @@ class  PnModule(Thread):
     for m in self.TFtoRun[TF]:
       if all(m==M) :
         #print' duplicate'
-        return 
-  
+        return
+
     self.TFtoRun[TF].append(M)
-   # vattr = ''
+  # vattr = ''
   #  j =0 
   #  for id in M:
-    #  vattr += '%s-%s,'%(self.pngraph.vs[self.mx_to_gr_indexP[j]]['name'],int(id))
-   #   j+=1
-    #print 'adding %s to enabled Fusion transitions'%vattr
-    
+  #  vattr += '%s-%s,'%(self.pngraph.vs[self.mx_to_gr_indexP[j]]['name'],int(id))
+  #   j+=1
+  #print 'adding %s to enabled Fusion transitions'%vattr
+
   #for modular analysis
   def explore(self,id):
     work = []
@@ -256,7 +264,7 @@ class  PnModule(Thread):
   def reset(self):
     for p in self.reachability.vs:
       p['visited']= False
-      
+
   def reachabilityModular(self):
     work=[]
     sgwork=[]
@@ -271,7 +279,7 @@ class  PnModule(Thread):
         fromID = self.statePresent(M)
         enabledTs = self.enabledT(M)
         from_M = []
-        for i in enabledTs: 
+        for i in enabledTs:
           if self.pngraph.vs[self.mx_to_gr_indexT[i]]['fusion'] == True: #new
             self.addToEnabledTF(self.pngraph.vs[self.mx_to_gr_indexT[i]]['name'],M) #new
             continue #noted the shared transition continue exploring.
@@ -311,7 +319,7 @@ class  PnModule(Thread):
       for t in ts:
         self.result[t]=[]
         j = self.mx_to_gr_indexTn[t]
-       # Mse = self.reachability.es.select(T_eq=t)
+        # Mse = self.reachability.es.select(T_eq=t)
         from_M = []
         to_M = []
         for M in self.TFtoRun[t]: #loop over synchtransition edges
@@ -320,22 +328,22 @@ class  PnModule(Thread):
           if self.statePresent(Mnew) == -1:
             self.reachability.add_vertices(1) #add this new marking
             newID = self.reachability.vcount()-1 # to the reachability
-            self.reachability.vs[newID]['M'] = Mnew 
+            self.reachability.vs[newID]['M'] = Mnew
             work.append(Mnew)
           from_M.append(self.statePresent(M))
           to_M.append(self.statePresent(Mnew))
-          
+
         self.result[t].append(from_M)
         self.result[t].append(to_M)
       for ftlists in self.result:
         for from_to in self.result[ftlists]:
           for i in range(len(from_to)):
             from_to[i] = '%s-%d'%(self.key,from_to[i])
-            
-            
+
+
       for t in ts:
         del self.TFtoRun[t] #remove processed maybe not yet?
-  
+
   def rnode(self,id,state):
     vattr = ""
     j=0
@@ -347,14 +355,14 @@ class  PnModule(Thread):
       i -=1
       j+=1
     return "<node id=\"%s\"><marking>%s"%(id,vattr)+"</marking></node>\n"
-	
+
   #export reachability graph to xml
   def reachtoxml(self,fname='',key=''):
     header = "<rgraph>\n"
     end = "</rgraph>"
     for v in self.reachability.vs:
       header+=self.rnode(v.index,v['M'])
-    
+
     for e in self.reachability.es:
       header+="<edge source=\"%s\" target=\"%s\"><transition>%s</transition></edge>\n"%(e.source,e.target,e['T'])
     dateTag = datetime.datetime.now().strftime("%Y-%b-%d_%H-%M-%S")
@@ -362,7 +370,7 @@ class  PnModule(Thread):
     header+=end
     f.write(header)
     f.close()
-	  
+
   def graph(self,key,fname='',id=None):
     vattr=''
     eattr = ''
@@ -377,10 +385,10 @@ class  PnModule(Thread):
         vattr +='SCC-%s\n'%v['SCC']
       for value in v['M']:
         if leng == 1:
-          vattr = 'fstate%d'%choice(range(100))
+          vattr = 'fstate%d'%choice(list(range(100)))
         else:
           if int(value) > 0:
-            
+
             vattr += '%s-%s '%(self.pngraph.vs[self.mx_to_gr_indexP[j]]['name'],int(value))
           if not i-1 == 0:
             pass#vattr+=','
@@ -395,10 +403,10 @@ class  PnModule(Thread):
       graph.add_edge(pydot.Edge(nodes[e.source],nodes[e.target],label=e['T']))
     #graph.write_svg('graphs/STATE%s%d%s.svg'%(self.key,choice(range(100)),dateTag))
     if not fname:
-      graph.write_svg('graphs/STATE%s%d%s.svg'%(self.key,choice(range(100)),dateTag))
+      graph.write_svg('graphs/STATE%s%d%s.svg'%(self.key,choice(list(range(100))),dateTag))
     else:
       graph.write_svg('%s.%s.reachability.svg'%(fname,key))
-  
+
   #the thread per module
   def run(self):
     self.reachability = ig.Graph(0,directed=True)
@@ -420,13 +428,13 @@ class  PnModule(Thread):
     self.printMatrix(self.incidence)
     if self.full:
       self.reachabilityG()
-    else:  
+    else:
       self.reachabilityModular()
-  
+
   def reachable(self, state):
     P = zeros(self.numP)
     for ps in state:
-      for key,value in ps.items():
+      for key,value in list(ps.items()):
         index = self.mx_to_gr_indexPn[key]
         P[index] = value
     id = self.statePresent(P)
@@ -434,17 +442,17 @@ class  PnModule(Thread):
       return True
     else:
       return False
-  
+
   def reachableMod(self,state):
     P = zeros(self.numP)
     for ps in state:
-      for key,value in ps.items():
+      for key,value in list(ps.items()):
         index = self.mx_to_gr_indexPn[key]
         P[index] = value
     id = self.statePresent(P)
     components = self.markAncestors(id)
     return Set(components)
-    
+
   def markAncestors(self,id):
     work = []
     result = []
@@ -459,9 +467,9 @@ class  PnModule(Thread):
           work.append(i)
     #self.reset()
     return result
-      
-    
+
+
   def getEnabledTFs(self):
-    return self.TFtoRun.keys()
+    return list(self.TFtoRun.keys())
 
 

+ 237 - 230
mt/ptcal/ptcal.py

@@ -2,25 +2,37 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-import re, json, uuid, cPickle as pickle, threading, itertools, traceback, logging
+import re, json, uuid, threading, itertools, traceback, logging, sys
+
+if sys.version_info[0] < 3:
+    import cPickle as pickle
+    import StringIO as StringIO
+    from Queue import *
+    from _abcoll import Iterable
+else:
+    import pickle as pickle
+    import io as StringIO
+    from queue import *
+    from collections.abc import Iterable
+
 from random import Random
-from utils import Utilities as utils
-from tcontext import ModelTransformationContext, ExhaustContext
-from tconstants import TConstants as TC
-from compiler import ModelAndRuleCompiler
-from pytcore.core.himesis import HConstants as HC
-from pytcore.rules.ndarule import NDARule
-from pytcore.tcore.messages import Packet
-from accurate_time import time as clock
-from accurate_time import set_start_time
+from .utils import Utilities as utils
+from .tcontext import ModelTransformationContext, ExhaustContext
+from .tconstants import TConstants as TC
+from .compiler import ModelAndRuleCompiler
+from .pytcore.core.himesis import HConstants as HC
+from .pytcore.rules.ndarule import NDARule
+from .pytcore.tcore.messages import Packet
+from .accurate_time import time as clock
+from .accurate_time import set_start_time
 set_start_time()
 
-import cProfile, pstats, StringIO
+import cProfile, pstats
 
 ''' hergin :: motif-integration start '''
-from motifcontext import MotifContext
-from tcorecontext import TCoreContext
-from pytcore.tcore.messages import Pivots
+from .motifcontext import MotifContext
+from .tcorecontext import TCoreContext
+from .pytcore.tcore.messages import Pivots
 ''' hergin :: motif-integration end '''
 
 import igraph as ig
@@ -28,12 +39,12 @@ import igraph as ig
 import datetime
 from random import *
 from threading import *
-from Queue import *
-from barrier import *
-from synchgraph import *
+
+from .barrier import *
+from .synchgraph import *
 from itertools import *
 #from petrinet import *
-from _abcoll import Iterable
+
 from pprint import isreadable
 from math import *
 
@@ -76,7 +87,7 @@ class PyTCoreAbstractionLayer :
         self.packet                        = None
         self.globalDeltas                  = []
         self.incUpdates                     = True
-        self.sendDeltas                    = True 
+        self.sendDeltas                    = True
         ''' hergin :: motif-integration end '''
         self._mtContexts                   = []
         self._transfData                   = {}
@@ -112,19 +123,19 @@ class PyTCoreAbstractionLayer :
         self.loadedModel=False
         #Enactment, set of formalisms we load automatically to do the cleanups.
         self._loadedMMs = set()
-        
+
     def processQuery(self,query):
         result = ""
         query = query['query'].replace('"',"'")
         qValue = query.split('(')[0].strip()
-        
+
         if qValue == "getCount":
             typ = query.split("'")[1].strip()
             def f(e):
                 return e['$ft__'].endswith(typ)
             result = "Number of '" + typ +"' in the resulting graph: " + str(len(filter(f,self.packet.graph.vs)))
             # I tried to use self.packet.graph.vs.select(tokens_eq=3) but the attribute names starting with $ didnt let me
-        
+
         elif qValue == "toggleSendDelta":
             self.sendDeltas = not self.sendDeltas
             if self.sendDeltas:
@@ -160,9 +171,9 @@ class PyTCoreAbstractionLayer :
                 self._handleChangelogs()
                 return self._mtContexts[-1].isLastStepFeedbackReceived()
             utils.doWhen(
-                         condition,
-                         TC.WAIT_ON_CHLOG_DELAY,
-                         callback)
+                condition,
+                TC.WAIT_ON_CHLOG_DELAY,
+                callback)
         else:
             callback()
 
@@ -183,68 +194,68 @@ class PyTCoreAbstractionLayer :
         TBI: performance would benefit greatly from caches that map atompm ids to
               GUIDs '''
     def _handleChangelogs(self) :
-        
+
         '''
             handle a single changelog '''
         def _handleChangelog(changelog) :
             def eq(a,b) : return str(a) == str(b)
 
             for c in changelog :
-                if c['op'] == 'MKEDGE' : 
+                if c['op'] == 'MKEDGE' :
                     node1 = \
                         self._M.vs.select(lambda v : eq(v['$atompmId'],c['id1']))[0]
                     node2 = \
                         self._M.vs.select(lambda v : eq(v['$atompmId'],c['id2']))[0]
                     self._M.add_edges([(node1.index, node2.index)])
-                
-                elif c['op'] == 'RMEDGE' : 
+
+                elif c['op'] == 'RMEDGE' :
                     pass
-    
+
                 elif c['op'] == 'MKNODE' :
                     self._compiler.addNode(self._M, json.loads(c['node']), c['id'])
-    
+
                 elif c['op'] == 'RMNODE' :
                     node = \
                         self._M.vs.select(lambda v : eq(v['$atompmId'],c['id']))[0]
-                    self._M.delete_nodes([node.index]) 
-    
-                elif c['op'] == 'CHATTR' : 
+                    self._M.delete_nodes([node.index])
+
+                elif c['op'] == 'CHATTR' :
                     node = \
                         self._M.vs.select(lambda v : eq(v['$atompmId'],c['id']))[0]
                     self._M.vs[node.index][c['attr']] = c['new_val']
-                    
-                elif c['op'] == 'LOADMM' : 
+
+                elif c['op'] == 'LOADMM' :
                     self._compiler.parseMetamodel(
-                            c['name'],
-                            utils.fread(
-                                '/users/%s/%s.metamodel'%(self.username,c['name'])),
-                            loadMM=True)
-    
-                elif c['op'] == 'DUMPMM' : 
+                        c['name'],
+                        utils.fread(
+                            '/users/%s/%s.metamodel'%(self.username,c['name'])),
+                        loadMM=True)
+
+                elif c['op'] == 'DUMPMM' :
                     self._compiler.unloadMetamodel(c['name'])
-    
-                elif c['op'] == 'RESETM' : 
+
+                elif c['op'] == 'RESETM' :
                     self._M = self._compiler.compileModel(c['new_model'])
                     self._M.mmTypeData = self._compiler.getMMTypeData()
-    
-                elif c['op'] == 'SYSOUT' : 
+
+                elif c['op'] == 'SYSOUT' :
                     ''' hergin :: motif-integration :: modify :: added startsWith functions '''
                     if c['text'].startswith(TC.RULE_SUCCESS_MSG) or \
-                        c['text'].startswith(TC.RULE_NOT_APPLICABLE_MSG) or \
-                        c['text'].startswith(TC.RULE_FAILURE_MSG) or \
-                        c['text'].startswith(TC.TRANSFORMATION_DONE) or \
-                        c['text'].startswith(TC.REMOTE_APPLICATION_FAILURE) :
+                            c['text'].startswith(TC.RULE_NOT_APPLICABLE_MSG) or \
+                            c['text'].startswith(TC.RULE_FAILURE_MSG) or \
+                            c['text'].startswith(TC.TRANSFORMATION_DONE) or \
+                            c['text'].startswith(TC.REMOTE_APPLICATION_FAILURE) :
                         self._mtContexts[-1].setLastStepFeedbackReceived()
 
 
         self._lock.acquire()
         self._changelogs.sort(key=lambda c : utils.sn2int(c['sequence#']))
         if len(self._changelogs) == 0 or \
-            self._aswNextSequenceNumber == None or \
-            utils.sn2int(self._changelogs[0]['sequence#']) > \
+                self._aswNextSequenceNumber == None or \
+                utils.sn2int(self._changelogs[0]['sequence#']) > \
                 utils.sn2int(self._aswNextSequenceNumber) :
             self._lock.release()
-        else : 
+        else :
             sn = self._changelogs[0]['sequence#']
             if utils.sn2int(sn) < utils.sn2int(self._aswNextSequenceNumber) :
                 raise ValueError('invalid changelog sequence# :: '+sn)
@@ -252,10 +263,10 @@ class PyTCoreAbstractionLayer :
                 logging.debug('++ ('+sn+') '+str(self._changelogs[0]['changelog']))
                 _handleChangelog(self._changelogs.pop(0)['changelog'])
                 self._aswNextSequenceNumber = \
-                            utils.incrementSequenceNumber(self._aswNextSequenceNumber)
+                    utils.incrementSequenceNumber(self._aswNextSequenceNumber)
                 self._lock.release()
                 self._handleChangelogs()
-    
+
 
 
     '''
@@ -273,10 +284,10 @@ class PyTCoreAbstractionLayer :
     def loadModel(self,m,mms,sn) :
         assert self._M == None, 'ptcal.loadModel() should only be called once'
         self._compiler = ModelAndRuleCompiler(
-                                self.username,
-                                self._aswCommTools['wid'],
-                                self.defaultDCL,
-                                self._mtwid)
+            self.username,
+            self._aswCommTools['wid'],
+            self.defaultDCL,
+            self._mtwid)
         self._M = self._compiler.compileModel(m,mmsData=mms)
         self._M.mmTypeData = self._compiler.getMMTypeData()
         ''' hergin :: motif-integration start '''
@@ -285,10 +296,9 @@ class PyTCoreAbstractionLayer :
         self._aswNextSequenceNumber = utils.incrementSequenceNumber(sn)
         self._lock.acquire()
         self._changelogs = \
-            filter(lambda c : utils.sn2int(sn) < utils.sn2int(c['sequence#']), 
-                     self._changelogs)
+            [c for c in self._changelogs if utils.sn2int(sn) < utils.sn2int(c['sequence#'])]
         self._lock.release()
-        
+
     '''
         load a PN model (and its metamodels)
 
@@ -313,16 +323,15 @@ class PyTCoreAbstractionLayer :
         self._aswNextSequenceNumber = utils.incrementSequenceNumber(sn)
         self._lock.acquire()
         self._changelogs = \
-            filter(lambda c : utils.sn2int(sn) < utils.sn2int(c['sequence#']), 
-                     self._changelogs)
+            [c for c in self._changelogs if utils.sn2int(sn) < utils.sn2int(c['sequence#'])]
         self._lock.release()
-        
-    
+
+
 
     ''' setup internal state to reflect given runtime configuration '''
     def _loadRuntimeConfiguration(self,rc) :
         if 'looseSubtypingMM' in rc :
-            self._compiler.RC__looseSubtypingMM = rc['looseSubtypingMM'] 
+            self._compiler.RC__looseSubtypingMM = rc['looseSubtypingMM']
 
 
 
@@ -334,14 +343,14 @@ class PyTCoreAbstractionLayer :
         if fname not in self._transfData :
             self._transfData[fname] = \
                 utils.fread('/users/%s/%s'%(self.username,fname))
-                
+
         ''' hergin :: motif-integration start '''
         if TC.MOTIFMM in self._transfData[fname]['metamodels']:
             self._mtContexts.append(MotifContext(fname,self))
         elif TC.TCOREMM in self._transfData[fname]['metamodels']:
             self._mtContexts.append(TCoreContext(fname,self))
         elif TC.TRANSFMM in self._transfData[fname]['metamodels']:
-            self._mtContexts.append(ModelTransformationContext(self._transfData[fname],fname))    
+            self._mtContexts.append(ModelTransformationContext(self._transfData[fname],fname))
         ''' hergin :: motif-integration end '''
 
 
@@ -413,13 +422,13 @@ class PyTCoreAbstractionLayer :
         
         NOTE:: in step 4c), while in debug mode, we highlight transformations, 
                  exhausts and rules before recursing on them or returning them,
-                 respectively ''' 
+                 respectively '''
     def _nextRule(self) :
         mtc = self._mtContexts[-1]
         self._loadRuntimeConfiguration(mtc.getRuntimeConfiguration())
 
         if self._debugOn and not mtc.isTransformationUnderWay() and \
-            (type(mtc) == MotifContext or type(mtc) == TCoreContext) : # hergin :: motif-integration modify
+                (type(mtc) == MotifContext or type(mtc) == TCoreContext) : # hergin :: motif-integration modify
             _execmode = self._execmode
             self._execmode = 'PAUSE'
             self._aswPrintReq(TC.DEBUGGING_HALT)
@@ -428,8 +437,8 @@ class PyTCoreAbstractionLayer :
             if self._execmode == 'STOPPING' :
                 return {'$err':'transformation stopped during debugging pause'}
             elif self._execmode == 'PAUSE' or \
-                  self._execmode == 'PLAY'  or \
-                  self._execmode == 'STEP' :
+                    self._execmode == 'PLAY'  or \
+                    self._execmode == 'STEP' :
                 self._execmode = _execmode
         self.bdapiQueue = Queue()
         ns = mtc.nextStep()
@@ -452,38 +461,38 @@ class PyTCoreAbstractionLayer :
                     if id(_mtc) in self._mtContexts2debugClients :
                         debugClient = self._mtContexts2debugClients[id(_mtc)]
                         self._requestNodeHighlight(
-                                debugClient['host'], 
-                                debugClient['aswid'], 
-                                _mtc.getCurrentStepId())
-                        break               
+                            debugClient['host'],
+                            debugClient['aswid'],
+                            _mtc.getCurrentStepId())
+                        break
 
             if 'id' in ns :
                 fulltype = mtc.t['nodes'][ns['id']]['$type']
-                
+
                 ''' hergin :: motif-integration start '''
                 if fulltype == mtc.metamodel+"/CRule":
                     if self._debugOn :
                         highlightUpcomingStep()
-                    
+
                     self._loadTransform(ns['rule'])
-                    
+
                     return self._nextRule()
-                    
-                elif fulltype.startswith(TC.TCOREMM) or\
+
+                elif fulltype.startswith(TC.TCOREMM) or \
                         fulltype.startswith(TC.MOTIFMM):
                     if self._debugOn :
                         highlightUpcomingStep()
-                        
+
                     return ns
                     ''' hergin :: motif-integration end '''
-                
+
                 elif fulltype == TC.TRANSFMM+'/Rule' :
                     if self._debugOn :
                         highlightUpcomingStep()
 
                     return {'fname':ns['fname'],
-                              'cr':self._compiler.compileRule(None,ns['fname'])}
-                
+                            'cr':self._compiler.compileRule(None,ns['fname'])}
+
                 #Enactment OpenModel blob, pathToFormalism is present is MM, but not used here,
                 #for functionality of opening window with formalisms is in WriteModel.
                 #pathToFormalism should be removed from MM for OpenModel (was not removed due to 
@@ -517,12 +526,12 @@ class PyTCoreAbstractionLayer :
                     self._mtContexts.append( ExhaustContext(mtc.t,ns['id'],self._randomGen) )
                     return self._nextRule()
             else :
-                
+
                 ''' hergin :: motif-integration start '''
                 if 'trafoResult' in ns:
                     return ns;
                     ''' hergin :: motif-integration end '''
-                
+
                 contents = utils.fread('/users/%s/%s'%(self.username,ns['fname']))
 
                 if self._debugOn :
@@ -530,20 +539,20 @@ class PyTCoreAbstractionLayer :
 
                 if TC.RULEMM in contents['metamodels'] :
                     return {'fname':ns['fname'],
-                              'cr':self._compiler.compileRule(contents,ns['fname'])}
+                            'cr':self._compiler.compileRule(contents,ns['fname'])}
 
                 elif TC.TRANSFMM in contents['metamodels'] :
                     self._transfData[ns['fname']] = contents
                     self._loadTransform(ns['fname'])
                     return self._nextRule()
 
-                raise ValueError(\
-                            'file does not contain valid rule or transformation '+\
-                            'model :: '+ns['fname'])
-        
+                raise ValueError( \
+                    'file does not contain valid rule or transformation '+ \
+                    'model :: '+ns['fname'])
+
 
     ''' Enactment do OpenModel magic
-    ''' 
+    '''
     def runOpenModelRule(self, fname="",formalism=""):
         unload = ""
         if not fname:
@@ -552,11 +561,11 @@ class PyTCoreAbstractionLayer :
             if not formalism:
                 self._aswPrintReq('auto loading model :: '+fname)
                 try:
-                    with open(os.getcwd()+'/users/'+self.username+fname) as f: 
+                    with open(os.getcwd()+'/users/'+self.username+fname) as f:
                         pass
                 except IOError as e:
                     self._aswPrintReq('failed opening a file :: '+fname)
-                    return (None,TC.FAILED) 
+                    return (None,TC.FAILED)
                 if not self.loadedModel:
                     method = '_loadModelForTransform'
                     if len(self._loadedMMs) == 0:
@@ -567,15 +576,15 @@ class PyTCoreAbstractionLayer :
                 else:
                     method = '_appendModelForTransform'
                 resp = self._aswCommTools['httpReq'](
-                'PUT',
-                '/GET/console',
-                {'text':'CLIENT_BDAPI :: '+
-                    '{"func":"'+method+'",'+
-                    ' "args":'+
-                        '{"fname":"'+fname+'",'+
-                        '"unload":"'+unload+'",'+
-                        ' "callback-url":"/__mt/bdapiresp?wid='+
-                                self._aswCommTools['wid']+'"}}'})
+                    'PUT',
+                    '/GET/console',
+                    {'text':'CLIENT_BDAPI :: '+
+                            '{"func":"'+method+'",'+
+                            ' "args":'+
+                            '{"fname":"'+fname+'",'+
+                            '"unload":"'+unload+'",'+
+                            ' "callback-url":"/__mt/bdapiresp?wid='+
+                            self._aswCommTools['wid']+'"}}'})
                 resp = self.bdapiQueue.get(block=True,timeout=5000)
                 if not resp['resp'] == 'ok':
                     return (None,TC.FAILED)
@@ -587,27 +596,27 @@ class PyTCoreAbstractionLayer :
             #this functionality is in WriteMOdel.       
             else:
                 pass
-#               Keep for now....
-#               self._aswPrintReq('pausing transform')
-#               self._execmode = 'PAUSE'
-#               self._aswPrintReq('opening new window for manual step:: '+fname)
-#               try:
-#                   with open(os.getcwd()+'/users/'+self.username+fname) as f: 
-#                       exists = 'true'
-#               except IOError as e:
-#                       exists = 'false'
-#               resp = self._aswCommTools['httpReq'](
-#               'PUT',
-#               '/GET/console',
-#               {'text':'CLIENT_BDAPI :: '+
-#                   '{"func":"_createEmptyModelInNewWindow",'+
-#                   ' "args":'+
-#                       '{"fname":"'+fname+'","exists":"'+exists+'",'+'"formalism":"'+formalism+'",'
-#                       ' "callback-url":"/__mt/bdapiresp?wid='+
-#                               self._aswCommTools['wid']+'"}}'})
-#               self.loadedModel = False
-#               return (None,TC.SUCCEEDED)
-    
+    #               Keep for now....
+    #               self._aswPrintReq('pausing transform')
+    #               self._execmode = 'PAUSE'
+    #               self._aswPrintReq('opening new window for manual step:: '+fname)
+    #               try:
+    #                   with open(os.getcwd()+'/users/'+self.username+fname) as f:
+    #                       exists = 'true'
+    #               except IOError as e:
+    #                       exists = 'false'
+    #               resp = self._aswCommTools['httpReq'](
+    #               'PUT',
+    #               '/GET/console',
+    #               {'text':'CLIENT_BDAPI :: '+
+    #                   '{"func":"_createEmptyModelInNewWindow",'+
+    #                   ' "args":'+
+    #                       '{"fname":"'+fname+'","exists":"'+exists+'",'+'"formalism":"'+formalism+'",'
+    #                       ' "callback-url":"/__mt/bdapiresp?wid='+
+    #                               self._aswCommTools['wid']+'"}}'})
+    #               self.loadedModel = False
+    #               return (None,TC.SUCCEEDED)
+
     ''' Enactment do WriteModel magic
     '''
     def runWriteModelRule(self,fname="",formalism=""):
@@ -623,14 +632,14 @@ class PyTCoreAbstractionLayer :
             if not formalism:
                 self._aswPrintReq('auto saving model :: '+fname)
                 resp = self._aswCommTools['httpReq'](
-                'PUT',
-                '/GET/console',
-                {'text':'CLIENT_BDAPI :: '+
-                    '{"func":"_writeModelAfterTransform",'+
-                    ' "args":'+
-                        '{"fname":"'+fname+'",'+
-                        ' "callback-url":"/__mt/bdapiresp?wid='+
-                                self._aswCommTools['wid']+'"}}'})
+                    'PUT',
+                    '/GET/console',
+                    {'text':'CLIENT_BDAPI :: '+
+                            '{"func":"_writeModelAfterTransform",'+
+                            ' "args":'+
+                            '{"fname":"'+fname+'",'+
+                            ' "callback-url":"/__mt/bdapiresp?wid='+
+                            self._aswCommTools['wid']+'"}}'})
                 #Need to wait for the model to load.
                 resp = self.bdapiQueue.get(block=True,timeout=5000)
                 if resp['resp'] == 'ok':
@@ -644,25 +653,25 @@ class PyTCoreAbstractionLayer :
                 self._execmode = 'PAUSE'
                 self._aswPrintReq('opening new window for manual step:: '+fname)
                 try:
-                    with open(os.getcwd()+'/users/'+self.username+fname) as f: 
+                    with open(os.getcwd()+'/users/'+self.username+fname) as f:
                         #open existing model
                         exists = 'true'
                 except IOError as e:
-                        #or save model with the fname provided
-                        exists = 'false'
+                    #or save model with the fname provided
+                    exists = 'false'
                 resp = self._aswCommTools['httpReq'](
-                'PUT',
-                '/GET/console',
-                {'text':'CLIENT_BDAPI :: '+
-                    '{"func":"_createEmptyModelInNewWindow",'+
-                    ' "args":'+
-                        '{"fname":"'+fname+'","exists":"'+exists+'",'+'"formalism":"'+formalism+'",'
-                        ' "callback-url":"/__mt/bdapiresp?wid='+
-                                self._aswCommTools['wid']+'"}}'})
+                    'PUT',
+                    '/GET/console',
+                    {'text':'CLIENT_BDAPI :: '+
+                            '{"func":"_createEmptyModelInNewWindow",'+
+                            ' "args":'+
+                            '{"fname":"'+fname+'","exists":"'+exists+'",'+'"formalism":"'+formalism+'",'
+                                                                                                    ' "callback-url":"/__mt/bdapiresp?wid='+
+                            self._aswCommTools['wid']+'"}}'})
                 self.loadedModel = False
                 return (None,TC.SUCCEEDED)
-            
-            
+
+
     '''
         synchronously save 1 changelog into self._changelogs '''
     def onchangelog(self,c) :
@@ -677,7 +686,7 @@ class PyTCoreAbstractionLayer :
         preventing _play()'s next call to _step(), if any) '''
     def pause(self) :
         self._execmode = 'PAUSE'
-        
+
         if not self.incUpdates:
             req = self.buildEditHttpReq(self.globalDeltas)
             self.globalDeltas = []
@@ -707,25 +716,25 @@ class PyTCoreAbstractionLayer :
             3. schedule a recursive call to _play() in TC.INTER_RULE_DELAY 
                 seconds '''
     def play(self) :
-	
+
         self.start_time = clock()
         if self._execmode == 'STOPPED':
             self._randomGen = Random(0)
         if self._execmode != 'PLAY' :
             self._execmode = 'PLAY'
             if not self._stopDebugProgrammedBreak() :
-                self._play()        
+                self._play()
     def _play(self) :
         if self.incUpdates:
             self._doWhenLastStepFeedbackReceived(
-                    lambda : self._execmode == 'PLAY' and \
-                                self._step() and \
-                                utils.setTimeout(TC.INTER_RULE_DELAY,self._play))
+                lambda : self._execmode == 'PLAY' and \
+                         self._step() and \
+                         utils.setTimeout(TC.INTER_RULE_DELAY,self._play))
         else:
             self._doWhenLastStepFeedbackReceived(
-                    lambda : self._execmode == 'PLAY' and \
-                                self._step() and \
-                                self._play())
+                lambda : self._execmode == 'PLAY' and \
+                         self._step() and \
+                         self._play())
 
 
 
@@ -737,8 +746,8 @@ class PyTCoreAbstractionLayer :
         for mtc in reversed(self._mtContexts) :
             if hasattr(mtc,'fname') and mtc.fname == clientInfo['fname'] :
                 self._mtContexts2debugClients[id(mtc)] = clientInfo
-            
-            
+
+
 
     ''' 
         request a new atompm client via the client backdoor API... the new client
@@ -747,14 +756,14 @@ class PyTCoreAbstractionLayer :
         url '''
     def _requestClientDebugWindow(self,fname) :
         return self._aswCommTools['httpReq'](
-                'PUT',
-                '/GET/console',
-                {'text':'CLIENT_BDAPI :: '+
+            'PUT',
+            '/GET/console',
+            {'text':'CLIENT_BDAPI :: '+
                     '{"func":"_loadModelInNewWindow",'+
                     ' "args":'+
-                        '{"fname":"'+fname+'",'+
-                        ' "callback-url":"/__mt/debugClient?wid='+
-                                self._aswCommTools['wid']+'"}}'})
+                    '{"fname":"'+fname+'",'+
+                    ' "callback-url":"/__mt/debugClient?wid='+
+                    self._aswCommTools['wid']+'"}}'})
 
 
 
@@ -763,18 +772,18 @@ class PyTCoreAbstractionLayer :
         highlighted '''         
     def _requestNodeHighlight(self,host,aswid,asid,timeout=1000) :
         return utils.httpReq(
-                'PUT',
-                host,
-                '/GET/console?wid='+aswid,
-                {'text':'CLIENT_BDAPI :: '+
+            'PUT',
+            host,
+            '/GET/console?wid='+aswid,
+            {'text':'CLIENT_BDAPI :: '+
                     '{"func":"_highlight",'+
                     ' "args":'+
-                        '{"asid":"'+asid+'",'+
-                        ' "timeout":'+str(timeout)+'}}'})
+                    '{"asid":"'+asid+'",'+
+                    ' "timeout":'+str(timeout)+'}}'})
 
     ''' hergin :: motif-integration :: START :: put this to outside of step function '''
     ''' also added self '''
-    
+
     '''
         go through a rule's deltas and (1) produce a batchEdit request, and 
         (2) undo them
@@ -814,41 +823,39 @@ class PyTCoreAbstractionLayer :
 
         for d in deltas :
             if d['op'] == 'RMNODE' :
-                reqs.append({\
-                        'method':'DELETE',
-                        'uri':d['attrs'][HC.FULLTYPE]+'/'+\
-                                d['attrs']['$atompmId']+'.instance'})
+                reqs.append({ \
+                    'method':'DELETE',
+                    'uri':d['attrs'][HC.FULLTYPE]+'/'+ \
+                          d['attrs']['$atompmId']+'.instance'})
 
             elif d['op'] == 'MKNODE' :
                 mknodes[d['guid']] = len(reqs)
                 node = self._M.vs[self._M.get_node(d['guid'])]
                 if neighborhood == None :
-                    neighborhood = map(
-                        lambda n: n[HC.FULLTYPE]+'/'+n['$atompmId']+'.instance',
-                        d['neighborhood'])
+                    neighborhood = neighborhood = [n[HC.FULLTYPE]+'/'+n['$atompmId']+'.instance' for n in d['neighborhood']]
                 if node[HC.CONNECTOR_TYPE] :
-                    reqs.append({\
-                            'method':'POST',
-                            'uri':node[HC.FULLTYPE]+'.type',
-                            'reqData':
-                                {'src':None,
-                                 'dest':None,
-                                 'hitchhiker':
-                                    {'segments':None,
-                                     'asSrc':None,
-                                     'asDest':None,
-                                     'neighborhood':neighborhood}}})    
+                    reqs.append({ \
+                        'method':'POST',
+                        'uri':node[HC.FULLTYPE]+'.type',
+                        'reqData':
+                            {'src':None,
+                             'dest':None,
+                             'hitchhiker':
+                                 {'segments':None,
+                                  'asSrc':None,
+                                  'asDest':None,
+                                  'neighborhood':neighborhood}}})
                 else :
-                    reqs.append({\
-                            'method':'POST',
-                            'uri':node[HC.FULLTYPE]+'.type',
-                            'reqData':{'hitchhiker':{'neighborhood':neighborhood}}})
+                    reqs.append({ \
+                        'method':'POST',
+                        'uri':node[HC.FULLTYPE]+'.type',
+                        'reqData':{'hitchhiker':{'neighborhood':neighborhood}}})
 
             elif d['op'] == 'RMEDGE' :
                 pass
 
             elif d['op'] == 'MKEDGE' :
-                def isConnectorMKNODE(req): 
+                def isConnectorMKNODE(req):
                     return 'dest' in req['reqData']
 
                 if d['guid1'] in mknodes :
@@ -857,7 +864,7 @@ class PyTCoreAbstractionLayer :
                         node2 = self._M.vs[self._M.get_node(d['guid2'])]
                         id      = atompmInstanceId(node2)
                         req['reqData']['dest'] = \
-                        req['reqData']['hitchhiker']['asDest'] = \
+                            req['reqData']['hitchhiker']['asDest'] = \
                             node2[HC.FULLTYPE]+'/'+id+'.instance'
 
                 if d['guid2'] in mknodes :
@@ -866,42 +873,42 @@ class PyTCoreAbstractionLayer :
                         node1 = self._M.vs[self._M.get_node(d['guid1'])]
                         id      = atompmInstanceId(node1)
                         req['reqData']['src'] = \
-                        req['reqData']['hitchhiker']['asSrc'] = \
+                            req['reqData']['hitchhiker']['asSrc'] = \
                             node1[HC.FULLTYPE]+'/'+id+'.instance'
-                    
+
             elif d['op'] == 'CHATTR' :
-                node = self._M.vs[self._M.get_node(d['guid'])]              
+                node = self._M.vs[self._M.get_node(d['guid'])]
                 id   = atompmInstanceId(node)
-                reqs.append({\
-                        'method':'PUT',
-                        'uri':node[HC.FULLTYPE]+'/'+id+'.instance',
-                        'reqData':{'changes':{d['attr']:d['new_val']}}})
+                reqs.append({ \
+                    'method':'PUT',
+                    'uri':node[HC.FULLTYPE]+'/'+id+'.instance',
+                    'reqData':{'changes':{d['attr']:d['new_val']}}})
 
             elif d['op'] == 'LOADMM' :
-                reqs.append({\
+                reqs.append({ \
                     'method':'PUT',
                     'uri':'/current.metamodels',
                     'reqData':
                         {'mm':'/%s%s.metamodel'%(self.username,d['name'])}})
-        if self.incUpdates:        
+        if self.incUpdates:
             for d in reversed(deltas) :
                 if d['op'] == 'RMNODE' :
                     newNodeIndex = self._M.add_node(newNodeGuid=d['attrs'][HC.GUID])
-                    for attr,val in d['attrs'].iteritems() :
+                    for attr,val in d['attrs'].items() :
                         self._M.vs[newNodeIndex][attr] = val
 
                 elif d['op'] == 'MKNODE' :
-                    node = self._M.vs[self._M.get_node(d['guid'])]              
+                    node = self._M.vs[self._M.get_node(d['guid'])]
                     self._M.delete_nodes([node.index])
 
                 elif d['op'] == 'RMEDGE' :
-                    node1 = self._M.vs[self._M.get_node(d['guid1'])]                
-                    node2 = self._M.vs[self._M.get_node(d['guid2'])]                
+                    node1 = self._M.vs[self._M.get_node(d['guid1'])]
+                    node2 = self._M.vs[self._M.get_node(d['guid2'])]
                     self._M.add_edges([(node1.index, node2.index)])
 
                 elif d['op'] == 'MKEDGE' :
                     pass
-                    
+
                 elif d['op'] == 'CHATTR' :
                     node = self._M.vs[self._M.get_node(d['guid'])]
                     node[d['attr']] = d['old_val']
@@ -972,23 +979,23 @@ class PyTCoreAbstractionLayer :
         NOTE: this function assumes that feedback for the last step has already
                 been received '''
     def step(self) :
-        if not hasattr(self, 'start_time'):         
+        if not hasattr(self, 'start_time'):
             self.start_time = clock()
         if self._execmode == 'PLAY' :
             pass
-        else : 
+        else :
             if self._execmode == 'STOPPED':
                 self._randomGen = Random(0)
             self._execmode = 'STEP'
             if not self._stopDebugProgrammedBreak() :
                 self._doWhenLastStepFeedbackReceived(self._step)
-    def _step(self) :       
+    def _step(self) :
 
 
         '''
             run the specified rule and return a tuple describing its execution '''
         def runRule(r) :
-            
+
             ''' hergin :: motif-integration start '''
             #self._aswPrintReq('launching rule :: '+r['fname'])
             #ar = NDARule(r['cr']['lhs'],r['cr']['rhs'],rng=self._randomGen)
@@ -997,31 +1004,31 @@ class PyTCoreAbstractionLayer :
                 ar = r['rule']
             else:
                 ar = NDARule(r['cr']['lhs'],r['cr']['rhs'],rng=self._randomGen,sendAndApplyDeltaFunc=self.sendAndApplyDelta)
-            
+
             if mtc.nextInput == "packetIn":
                 startTime=clock()
-                
+
                 self.packet = ar.packet_in(self.packet)
-                
+
                 mtc.setLastStepExecTime(clock()-startTime)
-                
+
             elif mtc.nextInput == "nextIn":
                 startTime=clock()
                 self.packet = ar.next_in(self.packet)
                 mtc.setLastStepExecTime(clock()-startTime)
-                
+
             elif mtc.nextInput == "cancelIn":
                 startTime=clock()
                 self.packet = ar.cancelIn(self.packet)
                 mtc.setLastStepExecTime(clock()-startTime)
-                
+
             elif mtc.nextInput == "successIn":
                 startTime=clock()
                 self.packet = ar.success_in(self.packet)
                 mtc.setLastStepExecTime(clock()-startTime)
-                
+
             ''' hergin :: motif-integration end '''
-            
+
             if ar.is_success :
                 return (self.packet.deltas,TC.SUCCEEDED)
             elif not ar.is_success :
@@ -1037,11 +1044,11 @@ class PyTCoreAbstractionLayer :
             nr = self._nextRule()
         except Exception :
             nr = {'$err':traceback.format_exc()}
-            
+
         ''' hergin :: motif-integration start TRAFO RESULT: in case of a CRule_end, pop it from context and continue the rest '''
         while 'trafoResult' in nr:
             if len(self._mtContexts)==1:
-                
+
                 if not self.incUpdates and self.sendDeltas:
                     ''' hergin TO BE MODIFIED - release mode will change '''
                     req = self.buildEditHttpReq(self.globalDeltas)
@@ -1052,7 +1059,7 @@ class PyTCoreAbstractionLayer :
                         self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
                         return
                     self._handleChangelogs()
-                
+
                 self._aswPrintReq(TC.TRANSFORMATION_DONE+nr['trafoResult']+" in "+str(self._mtContexts[-1].totalExecutionTime/1000.0)+" seconds, in total "+str((clock()-self.start_time)/1000.0))
                 self.stop()
                 return
@@ -1085,7 +1092,7 @@ class PyTCoreAbstractionLayer :
                 self._mtContexts[-1].setLastStepApplicationInfo(ai)
                 self._mtContexts[-1].setLastStepFeedbackReceived()
                 return True
-            else:   
+            else:
                 (res,ai) = runRule(nr)
             self._mtContexts[-1].setLastStepApplicationInfo(ai)
 
@@ -1133,7 +1140,7 @@ class PyTCoreAbstractionLayer :
     def isStopped(self)  :  return self._execmode == 'STOPPED'
     def isStopping(self) :  return self._execmode == 'STOPPING'
     def stop(self) :
-    
+
         if not self.incUpdates:
             req = self.buildEditHttpReq(self.globalDeltas)
             self.globalDeltas = []
@@ -1143,17 +1150,17 @@ class PyTCoreAbstractionLayer :
                 self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
                 return
             self._handleChangelogs()
-    
+
         self._execmode = 'STOPPING'
         #Used for enactment, prevents open being append.
         self.loadedModel = False
-        if not self._stopDebugProgrammedBreak() :   
+        if not self._stopDebugProgrammedBreak() :
             self._doWhenLastStepFeedbackReceived(self._stop)
     def _stop(self) :
         self._mtContexts = []
         for fname in self._userTransfs :
             self._loadTransform(fname)
-        self._mtContexts2debugClients = {}      
+        self._mtContexts2debugClients = {}
         self._aswPrintReq(TC.TRANSFORMATION_STOPPED)
         self._execmode = 'STOPPED'
 
@@ -1201,7 +1208,7 @@ class PyTCoreAbstractionLayer :
                 ii.  pressing "play", "step" or "stop"
             
         . before running a rule, it or its enclosing ExhaustContext's associated
-            atompm node is highlighted ''' 
+            atompm node is highlighted '''
     def toggleDebugMode(self) :
         self._debugOn = not self._debugOn
 

+ 71 - 78
mt/ptcal/pytcore/core/himesis.py

@@ -35,11 +35,11 @@ class Himesis(ig.Graph):
     Constants = HConstants
     EDGE_LIST_THRESHOLD = 10**3
 
-    
+
     @staticmethod
     def is_RAM_attribute(attr_name):
         return not attr_name.startswith('$')
-    
+
     def __init__(self, name='', num_nodes=0, edges=[]):
         """
             Creates a typed, attributed, directed, multi-graph.
@@ -58,46 +58,46 @@ class Himesis(ig.Graph):
         self.mmTypeData = {}
         self._guid2index = {}
         self.session = {}
-        
+
     def copy(self):
         cpy = ig.Graph.copy(self)
         cpy._guid2index = copy.deepcopy(self._guid2index)
         ''' hergin :: motif-integration FIX for mmTypeData bug '''
         cpy.mmTypeData = copy.deepcopy(self.mmTypeData)
         cpy.session = copy.deepcopy(self.session)
-        
+
         cpy.name = copy.deepcopy(self.name)
         return cpy
-    
+
     def __copy__(self):
         return self.copy()
-    
+
     def __deepcopy__(self, memo):
         return self.__copy__()
-    
+
     def __str__(self):
         s = super(Himesis, self).__str__()
         return self.name + ' ' + s[s.index('('):] + ' ' + str(self[Himesis.Constants.GUID])
-    
+
     def get_id(self):
         """
             Returns the unique identifier of the graph
         """
         return self[Himesis.Constants.GUID]
-    
+
     def node_iter(self):
         """
             Iterates over the nodes in the graph, by index
         """
-        return xrange(self.vcount())
-    
+        return range(self.vcount())
+
     def edge_iter(self):
         """
             Iterates over the edges in the graph, by index
         """
-        return xrange(self.ecount())
-    
-    def add_node(self, fulltype=None, isConnector=None, newNodeGuid=None):    
+        return range(self.ecount())
+
+    def add_node(self, fulltype=None, isConnector=None, newNodeGuid=None):
         newNodeIndex = self.vcount()
         if newNodeGuid == None :
             newNodeGuid = uuid.uuid4()
@@ -106,16 +106,16 @@ class Himesis(ig.Graph):
         self.vs[newNodeIndex][Himesis.Constants.FULLTYPE] = fulltype
         self.vs[newNodeIndex][Himesis.Constants.CONNECTOR_TYPE] = isConnector
         if fulltype in self.mmTypeData :
-            for attr,val in self.mmTypeData[fulltype].iteritems():
+            for attr,val in self.mmTypeData[fulltype].items():
                 self.vs[newNodeIndex][str(attr)] = val
         self._guid2index[newNodeGuid] = newNodeIndex
         return newNodeIndex
-    
+
     def delete_nodes(self, nodes):
         self.delete_vertices(nodes)
         # Regenerate the lookup because node indices have changed
         self._guid2index = dict((self.vs[node][Himesis.Constants.GUID], node) for node in self.node_iter())
-    
+
     def get_node(self,guid):
         """
             Retrieves the node instance with the specified guid
@@ -123,7 +123,7 @@ class Himesis(ig.Graph):
         """
         if guid in self._guid2index:
             if self._guid2index[guid] >= self.vcount() or \
-                self.vs[self._guid2index[guid]][Himesis.Constants.GUID] != guid :
+                    self.vs[self._guid2index[guid]][Himesis.Constants.GUID] != guid :
                 self._guid2index = dict((self.vs[node][Himesis.Constants.GUID], node) for node in self.node_iter())
             try:
                 return self._guid2index[guid]
@@ -133,7 +133,7 @@ class Himesis(ig.Graph):
         else :
             #TODO: This should be a TransformationLanguageSpecificException
             raise KeyError('Node not found with specified id. Make sure to only create nodes via Himesis.add_node(): ' + str(guid))
-    
+
     def draw(self, visual_style={}, label=None, show_guid=False, show_id=False, debug=False, width=600, height=900):
         """
         Visual graphic rendering of the graph.
@@ -146,7 +146,7 @@ class Himesis(ig.Graph):
             visual_style["layout"] = 'fr'
         if 'margin' not in visual_style:
             visual_style["margin"] = 10
-        
+
         # Set the labels
         if not label:
             if show_guid:
@@ -162,14 +162,14 @@ class Himesis(ig.Graph):
                     if not visual_style["vertex_label"][n]:
                         visual_style["vertex_label"][n] = self.vs[n][Himesis.Constants.FULLTYPE]
                         if debug:
-                            visual_style["vertex_label"][n] = str(n) + ':' + visual_style["vertex_label"][n] 
+                            visual_style["vertex_label"][n] = str(n) + ':' + visual_style["vertex_label"][n]
                     elif debug:
                         visual_style["vertex_label"][n] = str(n) + ':' + visual_style["vertex_label"][n]
             except:
                 raise Exception('%s is not a valid attribute' % label)
-        
+
         return ig.plot(self, bbox=(0, 0, width, height), **visual_style)
-    
+
     def execute(self, *args):
         raise AttributeError('This method is not implemented')
 
@@ -179,7 +179,7 @@ class HimesisPattern(Himesis):
         super(HimesisPattern, self).__init__(name, num_nodes, edges)
         self.nodes_label = {}
         self.nodes_pivot_out = {}
-    
+
     def get_node_with_label(self, label):
         """
             Retrieves the index of the node with the specified label.
@@ -189,7 +189,7 @@ class HimesisPattern(Himesis):
             self.nodes_label = dict([(self.vs[i][Himesis.Constants.MT_LABEL], i) for i in self.node_iter()])
         if label in self.nodes_label:
             return self.nodes_label[label]
-    
+
     def get_pivot_out(self, pivot):
         """
             Retrieves the index of the pivot node
@@ -205,7 +205,7 @@ class HimesisPreConditionPattern(HimesisPattern):
     def __init__(self, name='', num_nodes=0, edges=[]):
         super(HimesisPreConditionPattern, self).__init__(name, num_nodes, edges)
         self.nodes_pivot_in = {}
-    
+
     def get_pivot_in(self, pivot):
         """
             Retrieves the index of the pivot node
@@ -215,7 +215,7 @@ class HimesisPreConditionPattern(HimesisPattern):
             self.nodes_pivot_in = dict([(self.vs[i][Himesis.Constants.MT_PIVOT_IN], i) for i in self.node_iter()])
         if pivot in self.nodes_pivot_in:
             return self.nodes_pivot_in[pivot]
-    
+
     def constraint(self, mtLabel2graphIndexMap, graph):
         """
             If a constraint shall be specified, the corresponding Himesis graph must override this method.
@@ -225,14 +225,14 @@ class HimesisPreConditionPattern(HimesisPattern):
             @param graph: The whole input graph.
         """
         raise NotImplementedError('Use graph[Himesis.Constants.MT_CONSTRAINT]() instead')
-    
+
 
 class HimesisPreConditionPatternLHS(HimesisPreConditionPattern):
     def __init__(self, name='', num_nodes=0, edges=[]):
         super(HimesisPreConditionPatternLHS, self).__init__(name, num_nodes, edges)
         self.NACs = []
         self.bound_start_index = 0  # index of first bound NAC in NACs list
-    
+
     def addNAC(self, nac):
         """
             Appends the NAC to this LHS pattern
@@ -242,7 +242,7 @@ class HimesisPreConditionPatternLHS(HimesisPreConditionPattern):
         if nac.bridge is None:
             nac.bridge = nac.compute_bridge()
         self.NACs.append(nac)
-    
+
     def addNACs(self, NACs):
         """
             Stores the list of NACs in decreasing order of their size
@@ -262,23 +262,23 @@ class HimesisPreConditionPatternLHS(HimesisPreConditionPattern):
         unbound.sort(key=lambda nac: nac.vcount(), reverse=True)
         self.NACs = unbound + bound
         self.bound_start_index = len(unbound)
-    
+
     def getUnboundNACs(self):
         return self.NACs[:self.bound_start_index]
-    
+
     def getBoundNACs(self):
         return self.NACs[self.bound_start_index:]
-    
+
     def hasBoundNACs(self):
         return self.bound_start_index < len(self.NACs)
-    
+
 
 class HimesisPreConditionPatternNAC(HimesisPreConditionPattern):
     def __init__(self, LHS=None, name='', num_nodes=0, edges=[]):
         super(HimesisPreConditionPatternNAC, self).__init__(name, num_nodes, edges)
         self.LHS = LHS
         self.bridge_size = 0
-    
+
     def set_bridge_size(self):
         """
             Computes the bridge and stores the number of its nodes.
@@ -286,7 +286,7 @@ class HimesisPreConditionPatternNAC(HimesisPreConditionPattern):
         if self.LHS is None:
             raise Exception('Missing LHS to compute bridge')
         self.bridge_size = self.compute_bridge().vcount()
-       
+
     def compute_bridge(self):
         """
             Creates a HimesisPreConditionPattern defined as the intersection of graph with this instance.
@@ -303,15 +303,15 @@ class HimesisPreConditionPatternNAC(HimesisPreConditionPattern):
             G1, G2 = G2, G1
         # The bridge
         G = HimesisPreConditionPattern()
-        G[Himesis.Constants.GUID] = uuid.uuid4()    
-            
+        G[Himesis.Constants.GUID] = uuid.uuid4()
+
         # We don't need to actually solve the largest common subgraph (LCS) problem
         # because we assume that the nodes are labelled uniquely in each graph
         # and that if a label is in G1 and in G2, then it will be in G
         if len(G1.vs) == 0:
             return G
 
-        Labels2 = G2.vs[Himesis.Constants.MT_LABEL] 
+        Labels2 = G2.vs[Himesis.Constants.MT_LABEL]
         for label in G1.vs[Himesis.Constants.MT_LABEL]:
             if label in Labels2:
                 # Get the corresponding node from G1 
@@ -345,14 +345,14 @@ class HimesisPreConditionPatternNAC(HimesisPreConditionPattern):
                         G.vs[newNodeIndex][Himesis.Constants.GUID] = uuid.uuid4()
                         continue
                     # Ignore non-RAM attributes ('special' and HConstants attributes)
-                    elif not Himesis.is_RAM_attribute(attr): 
+                    elif not Himesis.is_RAM_attribute(attr):
                         continue
                     # Handle normal attribute
                     else :
                         if not v2[attr]:
                             # There is no constraint for this attribute
                             continue
-                                     
+
                         # The attribute constraint code is the conjunction of the LHS constraint
                         # with the NAC constraint for this attribute
                         def get_evalAttrConditions(_attr,_v1,_v2) :
@@ -374,15 +374,15 @@ class HimesisPreConditionPatternNAC(HimesisPreConditionPattern):
                 tgt = tgt[0]
                 G.add_edges([(src.index, tgt.index)])
             elif len(src) == 0 :
-#                raise Exception('Label does not exist :: '+str(src_label))
+                #                raise Exception('Label does not exist :: '+str(src_label))
                 pass
             elif len(tgt) == 0 :
-#                raise Exception('Label does not exist :: '+str(tgt_label))
+                #                raise Exception('Label does not exist :: '+str(tgt_label))
                 pass
             elif len(src) > 1 :
                 raise Exception('Label is not unique :: ' + str(src_label))
             elif len(tgt) > 1 :
-                raise Exception('Label is not unique :: ' + str(tgt_label))        
+                raise Exception('Label is not unique :: ' + str(tgt_label))
         return G
 
 
@@ -398,7 +398,7 @@ class HimesisPostConditionPattern(HimesisPattern):
         The action must be specified in the pattern graph and not the input graph.
         """
         raise NotImplementedError('Use graph[Himesis.Constants.MT_ACTION]() instead')
-    
+
     # This method implements the rewriting part of the rule.
     '''    
         NOTE 
@@ -432,7 +432,7 @@ class HimesisPostConditionPattern(HimesisPattern):
         for mm in self[Himesis.Constants.MISSING_METAMODELS]() :
             packet.deltas.append({'op':'LOADMM','name':mm})
 
-    
+
         # Set the attributes of graph.vs[graphNodeIndex] to match those of self.vs[rhsNodeIndex]
         def set_attributes(rhsNodeIndex, graphNodeIndex, newNode, pLabel2graphIndexMap) :
             changedSomething = False
@@ -450,16 +450,16 @@ class HimesisPostConditionPattern(HimesisPattern):
                         if oldVal != newVal :
                             graph.vs[graphNodeIndex][attrName] = newVal
                             packet.deltas.append(
-                                    {'op':'CHATTR',
-                                     'guid':graph.vs[graphNodeIndex][Himesis.Constants.GUID],
-                                     'attr':attrName,
-                                     'old_val':oldVal,
-                                     'new_val':newVal})
+                                {'op':'CHATTR',
+                                 'guid':graph.vs[graphNodeIndex][Himesis.Constants.GUID],
+                                 'attr':attrName,
+                                 'old_val':oldVal,
+                                 'new_val':newVal})
                             changedSomething = True
-                    except Exception, e :
+                    except Exception as e :
                         raise Exception("An error has occurred while computing the value of the attribute '%s' :: %s" % (attrName, e))
             return changedSomething
-        
+
         # Build a dictionary {label: node index} mapping each label of the pattern to a node in the graph to rewrite.
         # Because of the uniqueness property of labels in a rule, we can store all LHS labels
         # and subsequently add the labels corresponding to the nodes to be created.
@@ -469,7 +469,7 @@ class HimesisPostConditionPattern(HimesisPattern):
         LHS_labels = self.pre_labels
         for label in LHS_labels:
             rhsNodeIndex = self.get_node_with_label(label)
-            if rhsNodeIndex is None: 
+            if rhsNodeIndex is None:
                 continue        # not in the interface graph (LHS n RHS)
             if set_attributes(rhsNodeIndex, labels[label], False, labels) :
                 graph.vs[labels[label]][Himesis.Constants.MT_DIRTY] = True
@@ -479,16 +479,9 @@ class HimesisPostConditionPattern(HimesisPattern):
             RHS_labels = []
         else :
             RHS_labels = self.vs[Himesis.Constants.MT_LABEL]
-            def nonConnectorsFirst(l1,l2) :
-                c1 = self.vs[ self.get_node_with_label(l1) ][Himesis.Constants.CONNECTOR_TYPE]
-                c2 = self.vs[ self.get_node_with_label(l2) ][Himesis.Constants.CONNECTOR_TYPE]
-                if c1 and c2 :
-                    return 0
-                elif c1 :
-                    return 1
-                return -1
-            RHS_labels.sort(nonConnectorsFirst)
-            neighborhood = map(lambda l: graph.vs[labels[l]].attributes(), LHS_labels)
+            # sort non-connectors first
+            RHS_labels.sort(key=lambda x: self.vs[ self.get_node_with_label(x) ][Himesis.Constants.CONNECTOR_TYPE] or False)
+            neighborhood = [graph.vs[labels[l]].attributes() for l in LHS_labels]
 
         new_labels = []
         for label in RHS_labels:
@@ -496,12 +489,12 @@ class HimesisPostConditionPattern(HimesisPattern):
             if label not in LHS_labels:
                 new_labels += [label]
                 newNodeIndex = graph.add_node(
-                                self.vs[rhsNodeIndex][Himesis.Constants.FULLTYPE],
-                                self.vs[rhsNodeIndex][Himesis.Constants.CONNECTOR_TYPE])
+                    self.vs[rhsNodeIndex][Himesis.Constants.FULLTYPE],
+                    self.vs[rhsNodeIndex][Himesis.Constants.CONNECTOR_TYPE])
                 packet.deltas.append(
-                        {'op':'MKNODE',
-                         'neighborhood':neighborhood,
-                         'guid':graph.vs[newNodeIndex][Himesis.Constants.GUID]})
+                    {'op':'MKNODE',
+                     'neighborhood':neighborhood,
+                     'guid':graph.vs[newNodeIndex][Himesis.Constants.GUID]})
                 labels[label] = newNodeIndex
                 set_attributes(rhsNodeIndex, newNodeIndex, True, labels)
 
@@ -509,15 +502,15 @@ class HimesisPostConditionPattern(HimesisPattern):
         visited_edges = []
         for label in sorted(new_labels):
             for edge in self.es.select(lambda e: (e.index not in visited_edges and
-                      (label == self.vs[e.source][Himesis.Constants.MT_LABEL] or
-                       label == self.vs[e.target][Himesis.Constants.MT_LABEL]))):
+                                                  (label == self.vs[e.source][Himesis.Constants.MT_LABEL] or
+                                                   label == self.vs[e.target][Himesis.Constants.MT_LABEL]))):
                 src_label = self.vs[edge.source][Himesis.Constants.MT_LABEL]
                 tgt_label = self.vs[edge.target][Himesis.Constants.MT_LABEL]
                 graph.add_edges([(labels[src_label], labels[tgt_label])])
                 packet.deltas.append(
-                        {'op':'MKEDGE',                        
-                         'guid1':graph.vs[labels[src_label]][Himesis.Constants.GUID],
-                         'guid2':graph.vs[labels[tgt_label]][Himesis.Constants.GUID]})
+                    {'op':'MKEDGE',
+                     'guid1':graph.vs[labels[src_label]][Himesis.Constants.GUID],
+                     'guid2':graph.vs[labels[tgt_label]][Himesis.Constants.GUID]})
                 visited_edges.append(edge.index)
 
         # Set the output pivots
@@ -531,7 +524,7 @@ class HimesisPostConditionPattern(HimesisPattern):
         # Perform the post-action
         try:
             packet.deltas.extend(self[Himesis.Constants.MT_ACTION](labels, graph))
-        except Exception, e:
+        except Exception as e:
             raise Exception('An error has occurred while applying the post-action', e)
 
         # Delete nodes (automatically deletes adjacent edges)
@@ -546,17 +539,17 @@ class HimesisPostConditionPattern(HimesisPattern):
                     found = False
                     for rmedge in rmedges :
                         if rmedge['guid1'] == graph.vs[edge.source][Himesis.Constants.GUID] and \
-                            rmedge['guid2'] == graph.vs[edge.target][Himesis.Constants.GUID] :
+                                rmedge['guid2'] == graph.vs[edge.target][Himesis.Constants.GUID] :
                             found = True
                             break
-                    if not found :                        
+                    if not found :
                         rmedges.append({'op':'RMEDGE',
                                         'guid1':graph.vs[edge.source][Himesis.Constants.GUID],
                                         'guid2':graph.vs[edge.target][Himesis.Constants.GUID]})
         if len(labels_to_delete) > 0 :
             packet.deltas = rmedges + rmnodes + packet.deltas
             graph.delete_nodes(labels_to_delete)
-            
+
             ''' hergin :: motif-integration start :: remove the deleted nodes from pivots list '''
             for uuid in packet.global_pivots:
                 deleted=False

+ 101 - 101
mt/ptcal/pytcore/core/match_algo.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 import sys
-from himesis import Himesis
+from .himesis import Himesis
 
 
 class Priority(object):
@@ -22,7 +22,7 @@ class Priority(object):
         """
         self.source_graph = None
         self.pattern_graph = None
-    
+
     def cache_info(self, source_graph, pattern_graph):
         """
             Pre-computes any information required by the order and order_all methods
@@ -30,28 +30,28 @@ class Priority(object):
             @param pattern_graph: The pattern graph.
         """
         pass
-    
+
     def order_source(self, candidate_list):
         """
             Specifies the order for the terminal sets for the source graph.
             @param candidate_list: The list of possible candidates.
         """
         return sorted(candidate_list)
-    
+
     def order_pattern(self, candidate_list):
         """
             Specifies the order for the terminal sets for the pattern graph.
             @param candidate_list: The list of possible candidates.
         """
         return sorted(candidate_list)
-    
+
     def order_all_source(self, candidate_list):
         """
             Specifies the order for all source nodes.
             @param candidate_list: The list of possible candidates.
         """
         return candidate_list
-    
+
     def order_all_pattern(self, candidate_list):
         """
             Specifies the order for all pattern nodes.
@@ -79,7 +79,7 @@ class HimesisMatcher(object):
         self.G2 = pattern_graph
         self.pred1 = pred1
         self.succ1 = succ1
-        
+
         assert(isinstance(priority, Priority))
         self.priority = priority
         self.priority.source_graph = source_graph
@@ -91,19 +91,19 @@ class HimesisMatcher(object):
         if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
             # Give some breathing room
             sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))
-        
+
         # Initialize the state
         self.initialize()
-        
+
         # Check whether we are considering multi-graph
-#        if reduce(lambda x,y: x or y, self.G2.is_multiple()):
-#            self.cache_info_multi(self.G1_nodes, self.G2_nodes)
-        
+        #        if reduce(lambda x,y: x or y, self.G2.is_multiple()):
+        #            self.cache_info_multi(self.G1_nodes, self.G2_nodes)
+
         # Scan the two graphs to cache required information.
         # Typically stores the results of expensive operation on the graphs.
         # This speeds up the algorithm significantly.
         self.cache_info()
-    
+
     def cache_info(self):
         """
             Cache information on the nodes.
@@ -113,31 +113,31 @@ class HimesisMatcher(object):
         # Cache individual nodes
         self.G1_nodes = self.G1.node_iter()
         self.G2_nodes = self.G2.node_iter()
-        
-#        # Memoize the predecessor & successor information:
-#        # for each node store the number of neighbours and the list
-#        if len(self.pred1) == 0 or len(self.succ1) == 0:
-#            self.pred1 = {}
-#            self.succ1 = {}
-#            for node in self.G1_nodes:
-#                self.pred1[node] = (len(self.G1.predecessors(node)), self.G1.predecessors(node))
-#                self.succ1[node] = (len(self.G1.successors(node)), self.G1.successors(node))
-#        self.pred2 = {}
-#        self.succ2 = {}
-#        for node in self.G2_nodes:
-#            self.pred2[node] = (len(self.G2.predecessors(node)), self.G2.predecessors(node))
-#            self.succ2[node] = (len(self.G2.successors(node)), self.G2.successors(node))
-        
+
+        #        # Memoize the predecessor & successor information:
+        #        # for each node store the number of neighbours and the list
+        #        if len(self.pred1) == 0 or len(self.succ1) == 0:
+        #            self.pred1 = {}
+        #            self.succ1 = {}
+        #            for node in self.G1_nodes:
+        #                self.pred1[node] = (len(self.G1.predecessors(node)), self.G1.predecessors(node))
+        #                self.succ1[node] = (len(self.G1.successors(node)), self.G1.successors(node))
+        #        self.pred2 = {}
+        #        self.succ2 = {}
+        #        for node in self.G2_nodes:
+        #            self.pred2[node] = (len(self.G2.predecessors(node)), self.G2.predecessors(node))
+        #            self.succ2[node] = (len(self.G2.successors(node)), self.G2.successors(node))
+
         # Cache any further data used for the heuristic prioritization for computing the candidate pair
         # This is done when initializing the priority class
         self.priority.cache_info(self.G1, self.G2)
-    
+
     def reset_recursion_limit(self):
         """
             Restores the recursion limit.
         """
         sys.setrecursionlimit(self.old_recursion_limit)
-    
+
     def initialize(self):
         """
             (Re)Initializes the state of the algorithm.
@@ -152,12 +152,12 @@ class HimesisMatcher(object):
         #    - T1_out: the out-neighbours of the nodes in M_1
         #    - T2_out: the out-neighbours of the nodes in M_2
         #=======================================================================
-        
+
         # core_1[n] contains the index of the node m paired with n, if n is in the mapping
         self.core_1 = {}   # This is M_1
         # core_2[m] contains the index of the node n paired with m, if m is in the mapping
         self.core_2 = {}   # This is M_2
-        
+
         # The value stored is the depth of the search tree when the node became part of the corresponding set
         # Non-zero if n is in M_1 or in T_1^{in}
         self.in_1 = {}
@@ -172,13 +172,13 @@ class HimesisMatcher(object):
         self.inout_1 = {}
         # Non-zero if n is in M_2 or in T_2^{in} or in T_2^{out}
         self.inout_2 = {}
-        
+
         # Prepare the necessary data structures required for backtracking
         self.state = HimesisMatcherState(self)
 
         # Provide a convenient way to access the isomorphism mapping.
         self.mapping = self.core_2.copy()
-    
+
     def are_compatibile(self, src_node, patt_node):
         """
             Verifies if a candidate pair is compatible.
@@ -188,7 +188,7 @@ class HimesisMatcher(object):
         """
         sourceNode = self.G1.vs[src_node]
         patternNode = self.G2.vs[patt_node]
-        
+
         # First check if they are of the same type
         if sourceNode[Himesis.Constants.FULLTYPE] == patternNode[Himesis.Constants.FULLTYPE]:
             # Then check for the degree compatibility
@@ -196,25 +196,25 @@ class HimesisMatcher(object):
                     and self.succ2[patt_node][0] <= self.succ1[src_node][0])
         # Otherwise, first check for the degree compatibility
         elif not (self.pred2[patt_node][0] <= self.pred1[src_node][0]
-                and self.succ2[patt_node][0] <= self.succ1[src_node][0]):
+                  and self.succ2[patt_node][0] <= self.succ1[src_node][0]):
             return False
         # Then check sub-types compatibility
         else:
             return (patternNode[Himesis.Constants.MT_SUBTYPE_MATCH]
                     and sourceNode[Himesis.Constants.FULLTYPE] in patternNode[Himesis.Constants.MT_SUBTYPES])
-    
+
     def candidate_pairs_iter(self):
         """
             Iterator over candidate pairs of nodes in G1 and G2, according to the VF2 algorithm.
             The candidate pairs have all passed the compatibility check before output.
             @return: The candidate pair (source node, pattern node)
         """
-        
+
         #=======================================================================
         # Here we compute P(s) = (p1,p2) the candidate pair
         # for the current partial mapping M(s).
         #=======================================================================
-        
+
         # First try the nodes that are in both Ti_in and Ti_out
         if len(self.inout_1) > len(self.core_1) and len(self.inout_2) > len(self.core_2):
             for patt_node in self.priority.order_pattern(self.inout_2):
@@ -223,7 +223,7 @@ class HimesisMatcher(object):
             for src_node in self.priority.order_source(self.inout_1):
                 if src_node not in self.core_1:
                     yield src_node, patt_node
-        
+
         # If T1_out and T2_out are both non-empty:
         # P(s) = T1_out x {min T2_out}
         elif len(self.out_1) > len(self.core_1) and len(self.out_2) > len(self.core_2):
@@ -233,7 +233,7 @@ class HimesisMatcher(object):
             for src_node in self.priority.order_source(self.out_1):
                 if src_node not in self.core_1:
                     yield src_node, patt_node
-    
+
         # If T1_in and T2_in are both non-empty:
         # P(s) = T1_in x {min T2_in}
         elif len(self.in_1) > len(self.core_1) and len(self.in_2) > len(self.core_2):
@@ -243,7 +243,7 @@ class HimesisMatcher(object):
             for src_node in self.priority.order_source(self.in_1):
                 if src_node not in self.core_1:
                     yield src_node, patt_node
-    
+
         # If all terminal sets are empty:
         # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
         else:
@@ -253,7 +253,7 @@ class HimesisMatcher(object):
             for src_node in self.priority.order_all_source(self.G1_nodes):
                 if src_node not in self.core_1:
                     yield src_node, patt_node
-    
+
     def are_syntactically_feasible(self, src_node, patt_node):
         """
             Determines whether the two nodes are syntactically feasible,
@@ -267,18 +267,18 @@ class HimesisMatcher(object):
         # It verifies that edges directly or indirectly connected to M(s + P(s))
         # does not violate the subgraph matching conditions.
         #=======================================================================
-        
+
         # Check for self-loops
-#        e1, e2 = -1, -1
-#        if patt_node in self.succ2[patt_node] or patt_node in self.pred2[patt_node]:
-#            if src_node in self.succ1[src_node] or src_node in self.pred1[src_node]:
-#                e1 = self.G1.get_eid(src_node, src_node)
-#                e2 = self.G2.get_eid(patt_node, patt_node)
-#                if self.G1.count_multiple(e1) < self.G2.count_multiple(e2):
-#                    return False
-#            else:
-#                return False
-        
+        #        e1, e2 = -1, -1
+        #        if patt_node in self.succ2[patt_node] or patt_node in self.pred2[patt_node]:
+        #            if src_node in self.succ1[src_node] or src_node in self.pred1[src_node]:
+        #                e1 = self.G1.get_eid(src_node, src_node)
+        #                e2 = self.G2.get_eid(patt_node, patt_node)
+        #                if self.G1.count_multiple(e1) < self.G2.count_multiple(e2):
+        #                    return False
+        #            else:
+        #                return False
+
         # Counters for in and out edges found 
         in1 = 0
         in2 = 0
@@ -286,7 +286,7 @@ class HimesisMatcher(object):
         out2 = 0
         inout1 = 0
         inout2 = 0
-        
+
         # Checks if successors are compatible
         for successor2 in self.succ2[patt_node][1]:
             tmp = self.G2.predecessors(successor2)
@@ -300,8 +300,8 @@ class HimesisMatcher(object):
                     tmp = self.G1.successors(successor1)
                     self.succ1[successor1] = (len(tmp), tmp)
                     if (self.succ2[successor2][0] <= self.succ1[successor1][0]
-                        and self.pred2[successor2][0] <= self.pred1[successor1][0]
-                        and successor1 not in self.core_1):
+                            and self.pred2[successor2][0] <= self.pred1[successor1][0]
+                            and successor1 not in self.core_1):
                         break
                 else:
                     return False
@@ -315,7 +315,7 @@ class HimesisMatcher(object):
             else:
                 if self.core_2[successor2] not in self.succ1[src_node][1]:
                     return False
-        
+
         # Checks if predecessors are compatible
         for predecessor2 in self.pred2[patt_node][1]:
             tmp = self.G2.predecessors(predecessor2)
@@ -329,8 +329,8 @@ class HimesisMatcher(object):
                     tmp = self.G1.successors(predecessor1)
                     self.succ1[predecessor1] = (len(tmp), tmp)
                     if (self.pred2[predecessor2][0] <= self.pred1[predecessor1][0]
-                        and self.pred2[predecessor2][0] <= self.pred1[predecessor1][0]
-                        and predecessor1 not in self.core_1):
+                            and self.pred2[predecessor2][0] <= self.pred1[predecessor1][0]
+                            and predecessor1 not in self.core_1):
                         break
                 else:
                     return False
@@ -344,7 +344,7 @@ class HimesisMatcher(object):
             else:
                 if self.core_2[predecessor2] not in self.pred1[src_node][1]:
                     return False
-        
+
         # Now compute the counters of the source node
         for successor1 in self.succ1[src_node][1]:
             if successor1 not in self.core_1:
@@ -362,7 +362,7 @@ class HimesisMatcher(object):
             #else:
             #    if self.core_1[successor1] not in self.succ2[patt_node]:
             #        return False
-        
+
         # Now compute the counters of the source node
         for predecessor1 in self.pred1[src_node][1]:
             if predecessor1 not in self.core_1:
@@ -380,12 +380,12 @@ class HimesisMatcher(object):
             #else:
             #    if self.core_1[predecessor1] not in self.pred2[patt_node]:
             #        return False
-        
+
         # Finally, verify if all counters satisfy the subgraph matching conditions
         # For induced matches
         #return in2 <= in1 and out2 <= out1 and inout2 <= inout1
         return in2 <= in1 and out2 <= out1 and (in2 + out2 + inout2) <= (in1 + out1 + inout1)
-    
+
     def are_semantically_feasible(self, src_node, patt_node):
         """
             Determines whether the two nodes are syntactically feasible,
@@ -398,10 +398,10 @@ class HimesisMatcher(object):
         # This feasibility check looks at the data stored in the pair of candidates.
         # It verifies that all attribute constraints are satisfied.
         #=======================================================================
-        
+
         src_node = self.G1.vs[src_node]
         patt_node = self.G2.vs[patt_node]
-        
+
         # Check for attributes value/constraint
         for attr in patt_node.attribute_names():
             # Ignore non-RAM attributes 
@@ -416,16 +416,16 @@ class HimesisMatcher(object):
             # patt_node[attr](..) is expecting a mapping of patt_node's mtLabel
             # to src_node's index in self.G1... so we build this mapping first
             mtLabel2graphIndexMap = {}
-            mtLabel2graphIndexMap[ patt_node[Himesis.Constants.MT_LABEL] ] = src_node.index    
+            mtLabel2graphIndexMap[ patt_node[Himesis.Constants.MT_LABEL] ] = src_node.index
 
             try:
                 if not patt_node[attr](mtLabel2graphIndexMap,self.G1):
                     return False
-            except Exception, e:
-                    #TODO: This should be a TransformationLanguageSpecificException
+            except Exception as e:
+                #TODO: This should be a TransformationLanguageSpecificException
                 raise Exception("An error has occurred while checking the constraint of the attribute '%s' :: %s" % (attr, str(e)))
         return True
-    
+
     def _match(self):
         """
             Extends the pattern matching mapping.
@@ -437,7 +437,7 @@ class HimesisMatcher(object):
         # It cleans up the class variables after each recursive call.
         # If a match is found, we yield the mapping.
         #=======================================================================
-        
+
         # Base condition when a complete match is found
         if len(self.core_2) == self.G2.vcount():
             # Save the final mapping, otherwise garbage collection deletes it
@@ -445,14 +445,14 @@ class HimesisMatcher(object):
             yield self.mapping
         else:
             for src_node, patt_node in self.candidate_pairs_iter():
-                
+
                 # Cache the predecessors and successors of the candidate pairs on the fly 
                 self.pred1, self.succ1, self.pred2, self.succ2 = {}, {}, {}, {}
                 self.pred1[src_node] = (len(self.G1.predecessors(src_node)), self.G1.predecessors(src_node))
                 self.succ1[src_node] = (len(self.G1.successors(src_node)), self.G1.successors(src_node))
                 self.pred2[patt_node] = (len(self.G2.predecessors(patt_node)), self.G2.predecessors(patt_node))
                 self.succ2[patt_node] = (len(self.G2.successors(patt_node)), self.G2.successors(patt_node))
-                
+
                 if self.are_compatibile(src_node, patt_node):
                     if self.are_syntactically_feasible(src_node, patt_node):
                         if self.are_semantically_feasible(src_node, patt_node):
@@ -460,10 +460,10 @@ class HimesisMatcher(object):
                             newstate = self.state.__class__(self, src_node, patt_node)
                             for mapping in self._match():
                                 yield mapping
-    
+
                             # restore data structures
                             newstate.restore()
-    
+
     def has_match(self, context={}):
         """
             Determines if the pattern graph can be matched on the source graph. 
@@ -475,7 +475,7 @@ class HimesisMatcher(object):
             return True
         except StopIteration:
             return False
-    
+
     def match_iter(self, context={}):
         """
             Iterator over matchings of the pattern graph on the source graph.
@@ -515,7 +515,7 @@ class HimesisMatcherState(object):
         self.src_node = None
         self.patt_node = None
         self.depth = len(matcher.core_1)
-        
+
         if src_node is None or patt_node is None:
             # Then we reset the class variables
             matcher.core_1 = {}
@@ -532,15 +532,15 @@ class HimesisMatcherState(object):
             # Add the node pair to the isomorphism mapping.
             matcher.core_1[src_node] = patt_node
             matcher.core_2[patt_node] = src_node
-            
+
             # Store the node that was added last.
             self.src_node = src_node
             self.patt_node = patt_node
-            
+
             # Now we must update the other four vectors.
             # We will add only if it is not in there already!
             self.depth = len(matcher.core_1)
-            
+
             # First we add the new nodes...
             for vector in (matcher.in_1, matcher.out_1, matcher.inout_1):
                 if src_node not in vector:
@@ -548,9 +548,9 @@ class HimesisMatcherState(object):
             for vector in (matcher.in_2, matcher.out_2, matcher.inout_2):
                 if patt_node not in vector:
                     vector[patt_node] = self.depth
-                    
+
             # Now we add every other node...
-            
+
             # Updates for T_1^{in}
             new_nodes_in = []
             for node in matcher.core_1:
@@ -560,22 +560,22 @@ class HimesisMatcherState(object):
             for node in new_nodes_in:
                 if node not in matcher.in_1:
                     matcher.in_1[node] = self.depth
-                
+
             # Updates for T_1^{out}
-            new_nodes_out = []        
+            new_nodes_out = []
             for node in matcher.core_1:
                 n = [successor for successor in matcher.G1.successors(node)
                      if successor not in matcher.core_1 and successor not in new_nodes_out]
                 new_nodes_out += n
             for node in new_nodes_out:
-                if node not in matcher.out_1:                
+                if node not in matcher.out_1:
                     matcher.out_1[node] = self.depth
-            
+
             # Updates for T_1^{inout}
-            for node in set(matcher.in_1.keys() + matcher.out_1.keys()):
-                if node in matcher.out_1 and node in matcher.in_1 and node not in matcher.inout_1: 
+            for node in set(list(matcher.in_1.keys()) + list(matcher.out_1.keys())):
+                if node in matcher.out_1 and node in matcher.in_1 and node not in matcher.inout_1:
                     matcher.inout_1[node] = self.depth
-            
+
             # Updates for T_2^{in}
             new_nodes_in = []
             for node in matcher.core_2:
@@ -585,9 +585,9 @@ class HimesisMatcherState(object):
             for node in new_nodes_in:
                 if node not in matcher.in_2:
                     matcher.in_2[node] = self.depth
-    
+
             # Updates for T_2^{out}
-            new_nodes_out = []        
+            new_nodes_out = []
             for node in matcher.core_2:
                 n = [successor for successor in matcher.G2.successors(node)
                      if successor not in matcher.core_2 and successor not in new_nodes_out]
@@ -595,17 +595,17 @@ class HimesisMatcherState(object):
             for node in new_nodes_out:
                 if node not in matcher.out_2:
                     matcher.out_2[node] = self.depth
-            
+
             # Updates for T_2^{inout}
-            for node in set(matcher.in_2.keys() + matcher.out_2.keys()):
-                if node in matcher.out_2 and node in matcher.in_2 and node not in matcher.inout_2: 
+            for node in set(list(matcher.in_2.keys()) + list(matcher.out_2.keys())):
+                if node in matcher.out_2 and node in matcher.in_2 and node not in matcher.inout_2:
                     matcher.inout_2[node] = self.depth
-    
+
     def restore(self):
         """
             Deletes the HimesisMatcherState object and restores the class variables.
         """
-        
+
         # First we remove the node that was added from the core vectors.
         # Watch out! src_node == 0 should evaluate to True.
         if self.src_node is not None and self.patt_node is not None:
@@ -615,7 +615,7 @@ class HimesisMatcherState(object):
         # Now we revert the other four vectors.        
         # Thus, we delete all entries which have this depth level.
         for vector in (self.matcher.in_1, self.matcher.in_2, self.matcher.out_1, self.matcher.out_2, self.matcher.inout_1, self.matcher.inout_2):
-            for node in vector.keys():
+            for node in list(vector.keys()):
                 if vector[node] == self.depth:
                     del vector[node]
 
@@ -631,7 +631,7 @@ class VF2(HimesisMatcher):
             @param G2: The smaller graph. 
         """
         HimesisMatcher.__init__(self, G1, G2)
-    
+
     def match_iter(self):
         """
             Iterator over mappings of G2 on a subgraph of G1.
@@ -657,7 +657,7 @@ class SubgraphIsoMatcher(HimesisMatcher):
             Basically this is the same as HimesisMatcher but no node data is taken into consideration. 
         """
         HimesisMatcher.__init__(self, source_graph, pattern_graph, priority)
-    
+
     def are_compatibile(self, src_node, patt_node):
         """
             Verifies if a candidate pair is compatible.
@@ -665,10 +665,10 @@ class SubgraphIsoMatcher(HimesisMatcher):
             @param src_node: The candidate from the source graph.
             @param patt_node: The candidate from the pattern graph.
         """
-        
+
         return (self.pred2[patt_node][0] <= self.pred1[src_node][0]
                 and self.succ2[patt_node][0] <= self.succ1[src_node][0])
-    
+
     def are_semantically_feasible(self, sourceNode, patternNode):
         """
             Since no data is considered, the graphs have no semantics.

+ 6 - 6
mt/ptcal/pytcore/rules/arule.py

@@ -22,29 +22,29 @@ class ARule(Composer):
         self.M = Matcher(condition=LHS, max=1)
         self.I = Iterator(max_iterations=1)
         self.W = Rewriter(condition=RHS,sendAndApplyDeltaFunc=sendAndApplyDeltaFunc)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
-        
+
         # Match
         packet = self.M.packet_in(packet)
         if not self.M.is_success:
             self.exception = self.M.exception
             return packet
-        
+
         # Choose the only match
         packet = self.I.packet_in(packet)
         if not self.I.is_success:
             self.exception = self.I.exception
             return packet
-        
+
         # Rewrite
         packet = self.W.packet_in(packet)
         if not self.W.is_success:
             self.exception = self.W.exception
             return packet
-        
+
         # Output success packet
         self.is_success = True
         return packet
@@ -65,7 +65,7 @@ class ARule_r(ARule):
         super(ARule_r, self).__init__(LHS, RHS)
         self.R = Resolver(external_matches_only=external_matches_only,
                           custom_resolution=custom_resolution)
-    
+
     def packet_in(self, packet):
         packet = super(ARule_r, self).packet_in(packet)
         # is_success is True

+ 2 - 2
mt/ptcal/pytcore/rules/brule.py

@@ -17,11 +17,11 @@ class BRule(Composer):
         '''
         super(BRule, self).__init__()
         self.branches = branches
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
-        remaining_branches = range(len(self.branches))
+        remaining_branches = list(range(len(self.branches)))
         ''' hergin motif-integration ::: clone commented in observance of not need
              report bugs if have '''
         #original = packet.clone()

+ 4 - 4
mt/ptcal/pytcore/rules/bsrule.py

@@ -4,7 +4,7 @@ See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.infinity import INFINITY
 from ..tcore.composer import Composer
-from brule import BRule
+from .brule import BRule
 
 
 class BSRule(Composer):
@@ -21,11 +21,11 @@ class BSRule(Composer):
         self.brule = BRule(branches)
         self.max_iterations = max_iterations
         self.iterations = 0
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
-        
+
         while self.iterations < self.max_iterations:
             # Re-apply the BRule
             packet = self.brule.packet_in(packet)
@@ -35,5 +35,5 @@ class BSRule(Composer):
             else:
                 self.is_success = True
             self.iterations += 1
-          
+
         return packet

+ 17 - 17
mt/ptcal/pytcore/rules/crule.py

@@ -12,22 +12,22 @@ class CRule(Composer):
         self._mtContexts.append(motifContext)
         self.cruleEnd = False
         self.packet = None
-    
+
     def packet_in(self, packet):
-        
+
         self.cruleEnd = False
         self._mtContexts[-1]._expired=False
         self._mtContexts[-1]._lastStep={}
-        
+
         self.exception = None
         self.is_success = False
         self.packet = packet
         while not self.cruleEnd:
             result = self._step()
             self.is_success = result if result != None else False
-        
+
         return self.packet
-            
+
 
     def _nextRule(self) :
         mtc = self._mtContexts[-1]
@@ -35,29 +35,29 @@ class CRule(Composer):
         ns = mtc.nextStep()
 
         if 'id' in ns :
-        
+
             fulltype = mtc.t['nodes'][ns['id']]['$type']
-            
+
             if fulltype == mtc.metamodel+"CRule":
-                
+
                 #self._loadTransform(ns['rule'])
-                
+
                 return self._nextRule()
-                
-            elif fulltype.startswith('/Formalisms/__Transformations__/Transformation/T-Core') or\
+
+            elif fulltype.startswith('/Formalisms/__Transformations__/Transformation/T-Core') or \
                     fulltype.startswith('/Formalisms/__Transformations__/Transformation/MoTif'):
-                    
+
                 return ns
 
         elif 'trafoResult' in ns:
             return ns;
-        
-    def _step(self) :       
+
+    def _step(self) :
 
         def runRule(r) :
-            
+
             ar = r['rule']
-            
+
             self.packet = ar.packet_in(self.packet)
 
             if ar.is_success :
@@ -88,7 +88,7 @@ class CRule(Composer):
 
         else :
             (res,ai) = runRule(nr)
-            
+
             self._mtContexts[-1].setLastStepApplicationInfo(ai)
 
             if ai == TC.FAILED :

+ 4 - 4
mt/ptcal/pytcore/rules/frule.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.infinity import INFINITY
-from arule import ARule
+from .arule import ARule
 from ..tcore.resolver import Resolver
 
 
@@ -22,7 +22,7 @@ class FRule(ARule):
         # Matcher needs to find many matches
         self.M.max = max_iterations
         self.I.max_iterations = max_iterations
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -36,7 +36,7 @@ class FRule(ARule):
         if not self.I.is_success:
             self.exception = self.I.exception
             return packet
-        
+
         while True:
             # Rewrite
             packet = self.W.packet_in(packet)
@@ -72,7 +72,7 @@ class FRule_r(ARule):
         super(FRule_r, self).__init__(LHS, RHS, max_iterations)
         self.R = Resolver(external_matches_only=external_matches_only,
                           custom_resolution=custom_resolution)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False

+ 6 - 6
mt/ptcal/pytcore/rules/lfrule.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.infinity import INFINITY
-from lrule import LRule
+from .lrule import LRule
 from ..tcore.rewriter import Rewriter
 from ..tcore.resolver import Resolver
 
@@ -27,7 +27,7 @@ class LFRule(LRule):
         self.outer_first = outer_first
         self.R = Resolver(external_matches_only=external_matches_only,
                           custom_resolution=custom_resolution)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -41,7 +41,7 @@ class LFRule(LRule):
         if not self.I.is_success:
             self.exception = self.I.exception
             return packet
-        
+
         while True:
             if self.outer_first:
                 # Rewrite
@@ -50,7 +50,7 @@ class LFRule(LRule):
                     self.exception = self.W.exception
                     return packet
 
-                
+
                 # Resolve any conflicts if necessary
                 ''' hergin :: motif-integration commented '''
                 #packet = self.R.packet_in(packet)
@@ -62,7 +62,7 @@ class LFRule(LRule):
             if not self.inner_rule.is_success:
                 self.exception = self.inner_rule.exception
                 return packet
-            
+
             if not self.outer_first:
                 # Rewrite
                 packet = self.W.packet_in(packet)
@@ -70,7 +70,7 @@ class LFRule(LRule):
                     self.exception = self.W.exception
                     return packet
 
-                
+
                 # Resolve any conflicts if necessary
                 ''' hergin :: motif-integration commented '''
                 #packet = self.R.packet_in(packet)

+ 7 - 7
mt/ptcal/pytcore/rules/lqsrule.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.infinity import INFINITY
-from lrule import LRule
+from .lrule import LRule
 from symbol import try_stmt
 
 
@@ -19,7 +19,7 @@ class LQSRule(LRule):
             @param max_iterations: The maximum number of matches of the LHS.
         '''
         super(LQSRule, self).__init__(LHS, inner_rule, max_iterations)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -43,11 +43,11 @@ class LQSRule(LRule):
                 return packet
             # Rule has been applied once, so it's a success anyway
             self.is_success = True
-            
-            
+
+
             if self.I.iterations == self.I.max_iterations:
                 return packet
-            
+
             ''' hergin :: motif-integration : clean Matchset before rematch because only LHS doesnot have a rewriter '''
             #packet.match_sets = {}
             #try:
@@ -55,13 +55,13 @@ class LQSRule(LRule):
             #        del packet.match_sets[self.I.condition]
             #except KeyError:
             #    pass
-            
+
             # Re-Match
             packet = self.M.packet_in(packet)
             if not self.M.is_success:
                 self.exception = self.M.exception
                 return packet
-             
+
             # Choose another match
             packet = self.I.next_in(packet)
             # No more iterations are left

+ 3 - 3
mt/ptcal/pytcore/rules/lrule.py

@@ -23,7 +23,7 @@ class LRule(Composer):
         self.M = Matcher(condition=LHS, max=max_iterations)
         self.I = Iterator(max_iterations=max_iterations)
         self.inner_rule = inner_rule
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -45,8 +45,8 @@ class LRule(Composer):
                 if self.inner_rule.exception:
                     self.exception = self.inner_rule.exception
                 return packet
-            
-                
+
+
             # Clean the packet: required since there is no Rewriter in a Query
             if  len(packet.match_sets[self.I.condition].matches) == 0:
                 del packet.match_sets[self.I.condition]

+ 6 - 6
mt/ptcal/pytcore/rules/lsrule.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.infinity import INFINITY
-from lrule import LRule
+from .lrule import LRule
 from ..tcore.rewriter import Rewriter
 from ..tcore.resolver import Resolver
 
@@ -24,7 +24,7 @@ class LSRule(LRule):
         super(LSRule, self).__init__(LHS, inner_rule, max_iterations)
         self.W = Rewriter(condition=RHS,sendAndApplyDeltaFunc=sendAndApplyDeltaFunc)
         self.outer_first = outer_first
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -38,7 +38,7 @@ class LSRule(LRule):
         if not self.I.is_success:
             self.exception = self.I.exception
             return packet
-        
+
         while True:
             if self.outer_first:
                 # Rewrite
@@ -46,13 +46,13 @@ class LSRule(LRule):
                 if not self.W.is_success:
                     self.exception = self.W.exception
                     return packet
-                
+
             # Apply the inner rule
             packet = self.inner_rule.packet_in(packet)
             if not self.inner_rule.is_success:
                 self.exception = self.inner_rule.exception
                 return packet
-            
+
             if not self.outer_first:
                 # Rewrite
                 packet = self.W.packet_in(packet)
@@ -96,7 +96,7 @@ class LSRule_r(LSRule):
         super(LSRule_r, self).__init__()
         self.R = Resolver(external_matches_only=external_matches_only,
                           custom_resolution=custom_resolution)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False

+ 1 - 1
mt/ptcal/pytcore/rules/ndarule.py

@@ -30,7 +30,7 @@ class NDARule(Composer):
         self.W = Rewriter(condition=RHS,sendAndApplyDeltaFunc=sendAndApplyDeltaFunc)
         self.R = Resolver(external_matches_only=external_matches_only,
                           custom_resolution=custom_resolution)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False

+ 14 - 14
mt/ptcal/pytcore/rules/query.py

@@ -19,7 +19,7 @@ class Query(Composer):
         super(Query, self).__init__()
         self.M = Matcher(condition=LHS, max=1)
         self.I = Iterator(max_iterations=1)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -39,7 +39,7 @@ class Query(Composer):
         # Output success packet
         self.is_success = True
         return packet
-    
+
 class CQuery2(Composer):
     '''
         Finds a match for the LHS.
@@ -53,7 +53,7 @@ class CQuery2(Composer):
         self.M = Matcher(condition=LHS)
         self.I = Iterator()
         self.innerQuery=innerQuery
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -62,21 +62,21 @@ class CQuery2(Composer):
         if not self.M.is_success:
             self.exception = self.M.exception
             return packet
-        
+
         # Choose the first match
         packet = self.I.packet_in(packet)
         if not self.I.is_success:
             self.exception = self.I.exception
             return packet
-        
+
         while True:
-            
+
             packet = self.innerQuery.packet_in(packet)
             if self.innerQuery.is_success:
                 if self.innerQuery.exception:
                     self.exception = self.innerQuery.exception
                     return packet
-            
+
                 # Choose another match
                 packet = self.I.next_in(packet)
                 # No more iterations are left
@@ -90,7 +90,7 @@ class CQuery2(Composer):
             else:
                 self.is_success=True
                 return packet
-            
+
 class CQuery3(Composer):
     '''
         Finds a match for the LHS.
@@ -105,7 +105,7 @@ class CQuery3(Composer):
         self.I = Iterator()
         self.innerQuery=innerQuery
         self.secondInnerQuery=secondInnerQuery
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -114,21 +114,21 @@ class CQuery3(Composer):
         if not self.M.is_success:
             self.exception = self.M.exception
             return packet
-        
+
         # Choose the first match
         packet = self.I.packet_in(packet)
         if not self.I.is_success:
             self.exception = self.I.exception
             return packet
-        
+
         while True:
-            
+
             packet = self.innerQuery.packet_in(packet)
             if self.innerQuery.is_success:
                 if self.innerQuery.exception:
                     self.exception = self.innerQuery.exception
                     return packet
-            
+
                 # Choose another match
                 packet = self.I.next_in(packet)
                 # No more iterations are left
@@ -139,7 +139,7 @@ class CQuery3(Composer):
                         self.is_success = False
                     return packet
             else:
-                
+
                 packet = self.secondInnerQuery.packet_in(packet)
                 if self.secondInnerQuery.is_success:
                     if self.secondInnerQuery.exception:

+ 1 - 1
mt/ptcal/pytcore/rules/sequence.py

@@ -16,7 +16,7 @@ class Sequence(Composer):
         '''
         super(Sequence, self).__init__()
         self.rules = rules
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False

+ 5 - 5
mt/ptcal/pytcore/rules/srule.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.infinity import INFINITY
-from arule import ARule
+from .arule import ARule
 from ..tcore.resolver import Resolver
 
 
@@ -20,7 +20,7 @@ class SRule(ARule):
         '''
         super(SRule, self).__init__(LHS, RHS,sendAndApplyDeltaFunc)
         self.I.max_iterations = max_iterations
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -34,14 +34,14 @@ class SRule(ARule):
         if not self.I.is_success:
             self.exception = self.I.exception
             return packet
-        
+
         while True:
             # Rewrite
             packet = self.W.packet_in(packet)
             if not self.W.is_success:
                 self.exception = self.W.exception
                 return packet
-            
+
             # Rule has been applied once, so it's a success anyway
             self.is_success = True
             if self.I.iterations == self.I.max_iterations:
@@ -76,7 +76,7 @@ class SRule_r(SRule):
         super(SRule_r, self).__init__(LHS, RHS, max_iterations)
         self.R = Resolver(external_matches_only=external_matches_only,
                           custom_resolution=custom_resolution)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False

+ 4 - 4
mt/ptcal/pytcore/rules/xfrule.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.infinity import INFINITY
-from frule import FRule
+from .frule import FRule
 from ..tcore.rollbacker import Rollbacker
 from ..tcore.resolver import Resolver
 
@@ -22,7 +22,7 @@ class XFRule(FRule):
         super(XFRule, self).__init__(LHS, RHS, max_iterations)
         # max_iterations=1 because no all matches have been exhausted after first application
         self.B = Rollbacker(condition=LHS, max_iterations=1)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -73,7 +73,7 @@ class XFRule(FRule):
                     # Output success packet
                     self.is_success = True
                 return packet
-    
+
     def next_in(self, packet):
         # Only one roll-back
         self.exception = None
@@ -100,7 +100,7 @@ class XFRule_r(XFRule):
         super(XFRule_r, self).__init__(LHS, RHS, max_iterations)
         self.R = Resolver(external_matches_only=external_matches_only,
                           custom_resolution=custom_resolution)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False

+ 5 - 5
mt/ptcal/pytcore/rules/xrule.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.infinity import INFINITY
-from arule import ARule
+from .arule import ARule
 from ..tcore.rollbacker import Rollbacker
 from ..tcore.resolver import Resolver
 
@@ -26,7 +26,7 @@ class XRule(ARule):
         self.M.max = max_iterations
         self.I.max_iterations = max_iterations
         self.B = Rollbacker(condition=LHS, max_iterations=max_iterations)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -64,7 +64,7 @@ class XRule(ARule):
             return packet
         self.is_success = True
         return packet
-    
+
     def next_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -111,7 +111,7 @@ class XRule_r(XRule):
         super(XRule_r, self).__init__(LHS, RHS, max_iterations)
         self.R = Resolver(external_matches_only=external_matches_only,
                           custom_resolution=custom_resolution)
-    
+
     def packet_in(self, packet):
         packet = super(XRule_r, self).packet_in(packet)
         # is_success is True
@@ -125,7 +125,7 @@ class XRule_r(XRule):
         else:
             self.is_success = False
         return packet
-    
+
     def next_in(self, packet):
         packet = super(XRule_r, self).next_in(packet)
         # is_success is True

+ 4 - 4
mt/ptcal/pytcore/rules/xsrule.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.infinity import INFINITY
-from srule import SRule
+from .srule import SRule
 from ..tcore.rollbacker import Rollbacker
 from ..tcore.resolver import Resolver
 
@@ -22,7 +22,7 @@ class XSRule(SRule):
         super(XSRule, self).__init__(LHS, RHS, max_iterations)
         # max_iterations=1 because now all matches have been exhausted after first application
         self.B = Rollbacker(condition=LHS, max_iterations=1)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -79,7 +79,7 @@ class XSRule(SRule):
                     self.exception = self.I.exception
                     self.is_success = False
                 return packet
-    
+
     def next_in(self, packet):
         # Only one roll-back
         self.exception = None
@@ -106,7 +106,7 @@ class XSRule_r(XSRule):
         super(XSRule_r, self).__init__(LHS, RHS, max_iterations)
         self.R = Resolver(external_matches_only=external_matches_only,
                           custom_resolution=custom_resolution)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False

+ 3 - 3
mt/ptcal/pytcore/tcore/composer.py

@@ -2,7 +2,7 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-from composite_primitive import CompositePrimitive
+from .composite_primitive import CompositePrimitive
 
 
 class Composer(CompositePrimitive):
@@ -16,9 +16,9 @@ class Composer(CompositePrimitive):
             Both packet_in & next_in methods must be overridden to provide meaningful behaviour. 
         '''
         super(Composer, self).__init__()
-    
+
     def packet_in(self, packet):
         return packet
-    
+
     def next_in(self, packet):
         return packet

+ 3 - 3
mt/ptcal/pytcore/tcore/composite_primitive.py

@@ -2,15 +2,15 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-from primitive import Primitive
+from .primitive import Primitive
 
 # Abstract class
 class CompositePrimitive(Primitive):
     def __init__(self):
         super(CompositePrimitive, self).__init__()
-    
+
     def packet_in(self, packet):
         raise AttributeError('Method not implemented')
-    
+
     def next_in(self, packet):
         raise AttributeError('Method not implemented')

+ 4 - 4
mt/ptcal/pytcore/tcore/control_primitive.py

@@ -2,7 +2,7 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-from primitive import Primitive
+from .primitive import Primitive
 
 # Abstract class
 class ControlPrimitive(Primitive):
@@ -10,13 +10,13 @@ class ControlPrimitive(Primitive):
         super(ControlPrimitive, self).__init__()
         self.success = []   # [Packet]
         self.fail = []      # [Packet]
-    
+
     def success_in(self, packet):
         raise AttributeError('Method not implemented')
-    
+
     def fail_in(self, packet):
         raise AttributeError('Method not implemented')
-    
+
     def reset(self):
         self.success = []
         self.fail = []

+ 6 - 6
mt/ptcal/pytcore/tcore/iterator.py

@@ -4,7 +4,7 @@ See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.seeded_random import Random
 from ..util.infinity import INFINITY
-from rule_primitive import RulePrimitive
+from .rule_primitive import RulePrimitive
 #from messages import TransformationException
 
 
@@ -25,12 +25,12 @@ class Iterator(RulePrimitive):
         self.rng = rng
         if condition:
             self.condition = condition.get_id()
-    
+
     def cancelIn(self, cancel):
         if self.condition not in cancel.exclusions:
             super(Iterator, self).cancelIn(cancel)
             self.iterations = 0
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -42,7 +42,7 @@ class Iterator(RulePrimitive):
             self.iterations = 1
             self.is_success = True
         return packet
-    
+
     def next_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -57,11 +57,11 @@ class Iterator(RulePrimitive):
             self.iterations += 1
             self.is_success = True
         return packet
-    
+
     def _choose(self, packet):
         # Choose a match from the current match set and remove it from the list of matches
         return packet.match_sets[packet.current].matches.pop((self.rng if self.rng != None else Random).randint(0, len(packet.match_sets[packet.current].matches) - 1))
-    
+
     def _globalize_pivots(self, packet):
         """
             Puts all local pivots of the current match in the global pivots of the packet.

+ 20 - 17
mt/ptcal/pytcore/tcore/matcher.py

@@ -2,13 +2,16 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
+import sys
 from copy import deepcopy
 from ..util.infinity import INFINITY
 from ..core.match_algo import HimesisMatcher
 from ..core.himesis import HConstants as HC
-from rule_primitive import RulePrimitive
-from messages import MatchSet, Match, TransformationException
+from .rule_primitive import RulePrimitive
+from .messages import MatchSet, Match, TransformationException
 
+if sys.version_info[0] >= 3:
+    from functools import reduce
 
 class Matcher(RulePrimitive):
     '''
@@ -23,13 +26,13 @@ class Matcher(RulePrimitive):
         super(Matcher, self).__init__()
         self.max = max
         self.condition = condition
-    
+
     def __str__(self):
         s = super(Matcher, self).__str__()
         s = s.split(' ')
         s.insert(1, '[%s]' % self.condition.name)
         return reduce(lambda x, y: '%s %s' % (x,y), s)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -37,7 +40,7 @@ class Matcher(RulePrimitive):
             matchSet = packet.match_sets[self.condition[HC.GUID]]
         else:
             matchSet = MatchSet()
-        
+
         # Find the matches
         try:
             i = 1
@@ -51,20 +54,20 @@ class Matcher(RulePrimitive):
                     if i > self.max:
                         # We don't need any more matches
                         break
-        except Exception, e:
+        except Exception as e:
             self.is_success = False
             self.exception = TransformationException(e)
             self.exception.packet = packet
             self.exception.transformation_unit = self
             return packet
-        
+
         # Don't forget to add the match set to the packet, even if no matches were found
         if len(matchSet.matches) > 0:
             packet.match_sets[self.condition[HC.GUID]] = matchSet
-        
+
         # Identify that this is the condition we are currently processing
         packet.current = self.condition[HC.GUID]
-        
+
         # Success only if matches were found
         self.is_success = len(matchSet.matches) > 0
         return packet
@@ -84,11 +87,11 @@ class Matcher(RulePrimitive):
         '''
         pred1 = {}  # To optimize the matcher, since otherwise matcher will compute the predecessors of the source graph many times
         succ1 = {}  # To optimize the matcher, since otherwise matcher will compute the successors of the source graph many times
-        
+
         # Cache the pivot nodes of the source graph
         pivots = deepcopy(pivots)
         pivots.to_source_node_indices(graph)
-        
+
         #===================================================================
         # First process the NACs that are not bound to the LHS
         #===================================================================
@@ -110,7 +113,7 @@ class Matcher(RulePrimitive):
             # For further matching optimizations
             pred1 = nacMatcher.pred1
             succ1 = nacMatcher.succ1
-        
+
         # Either there are no NACs, or there were only unbound NACs that do not match, so match the LHS now
         if not self.condition.hasBoundNACs():
             lhsMatcher = HimesisMatcher(source_graph=graph, pattern_graph=self.condition, pred1=pred1, succ1=succ1)
@@ -125,19 +128,19 @@ class Matcher(RulePrimitive):
                         yield mapping
             except: raise
             finally: lhsMatcher.reset_recursion_limit()
-            
+
             # The matching is complete
             return
-        
+
         #===================================================================
         # Now process the NACs that have some nodes bound to the LHS
         #===================================================================
-        
+
         # Continue the matching looking for the LHS now
         lhsMatcher = HimesisMatcher(source_graph=graph, pattern_graph=self.condition, pred1=pred1, succ1=succ1)
         # Augment the bridge mapping with the pivot mappings
         lhs_pivots = pivots.to_mapping(graph, self.condition)
-        
+
         try:
             for mapping in lhsMatcher.match_iter(context=lhs_pivots):
                 # Make the mapping into {...,LHSlabel:graphIndex,...}
@@ -149,7 +152,7 @@ class Matcher(RulePrimitive):
                     for NAC in self.condition.getBoundNACs():
                         # This mapping represents the mapping of the bridge of this NAC with the LHS
                         bridgeMapping = match.to_mapping(graph, NAC)
-                        
+
                         # Now continue the matching looking for a match of the corresponding NAC
                         nacMatcher = HimesisMatcher(source_graph=graph, pattern_graph=NAC, pred1=pred1, succ1=succ1)
                         for nac_mapping in nacMatcher.match_iter(context=bridgeMapping):

+ 56 - 56
mt/ptcal/pytcore/tcore/messages.py

@@ -3,7 +3,7 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 import copy, traceback
-from ..core.himesis import Himesis 
+from ..core.himesis import Himesis
 
 # Abstract class
 class Message(object): pass
@@ -46,7 +46,7 @@ Context:%s
        self.transformation_unit, self.transformation_context)
 
     def __str__(self):
-#        return self.debug_msg
+        #        return self.debug_msg
         return self.msg + '\n' + str(self.transformation_context)
 
 
@@ -56,7 +56,7 @@ class Cancel(Message):
     '''
     def __init__(self):
         self.exclusions = []    # the primitives to not be cancelled
-    
+
     def __str__(self):
         return 'Cancel - exclusion = %s' % self.exclusions
 
@@ -72,7 +72,7 @@ class Packet(Message):
         self.match_sets = {}             # holds of the matches for each pre-condition pattern already matched
         self.current = None              # points to the guid identifying the current match set
         self.global_pivots = Pivots()    # {pivot name: source node guid}
-    
+
     def __str__(self):
         ms = ''.join(['''
         %s: %s''' % (k, self.match_sets[k]) for k in sorted(self.match_sets)])
@@ -84,7 +84,7 @@ class Packet(Message):
     match_sets: %s
     pivots: %s''' % (self.current, self.graph, self.deltas, ms, self.global_pivots)
         return s
-    
+
     def clone(self):
         cpy = Packet()
         cpy.graph = self.graph.copy()
@@ -93,7 +93,7 @@ class Packet(Message):
         cpy.current = self.current
         cpy.match_sets = copy.deepcopy(self.match_sets)
         return cpy
-    
+
     def copy_readonly(self):
         cpy = Packet()
         cpy.graph = self.graph
@@ -102,17 +102,17 @@ class Packet(Message):
         cpy.current = self.current
         cpy.match_sets = copy.deepcopy(self.match_sets)
         return cpy
-    
+
     def copy_state(self, conditionId):
         cpy = Packet()
         cpy.graph = self.graph.copy()
-        cpy.deltas = self.deltas[:]        
+        cpy.deltas = self.deltas[:]
         cpy.global_pivots = copy.copy(self.global_pivots)
         cpy.current = self.current
         if conditionId in self.match_sets:
             cpy.match_sets = {conditionId: copy.copy(self.match_sets[conditionId])}
         return cpy
-    
+
     def set_state(self, packet):
         self.graph = packet.graph
         self.deltas = packet.deltas
@@ -120,38 +120,38 @@ class Packet(Message):
         self.current = packet.current
         if packet.match_sets is not None:
             self.match_sets.update(packet.match_sets)
-    
+
     def clear_state(self):
         self.deltas = []
         self.match_sets = {}
         self.current = None
         self.global_pivots = Pivots()
-    
+
     def __copy__(self):
         return self.copy_readonly()
-    
+
     def __deepcopy__(self, memo):
         return self.__copy__()
-    
-#    def get_curr_matchset(self):
-#        return self.match_sets[self.current]
-#    
-#    def get_match2rewrite(self, condition):
-#        return self.match_sets[condition].matches[self.match_sets[condition].match2rewrite]
-#    
-#    def get_curr_match2rewrite(self):
-#        return self.match_sets[self.current].matches[self.match_sets[self.current].match2rewrite]
-#    
-#    def remove_match2rewrite(self, condition):
-#        # Remove the match to rewrite
-#        del self.match_sets[condition].matches[self.match_sets[condition].match2rewrite]
-#        # If the corresponding match set has become empty, remove it too
-#        if len(self.match_sets[condition].matches) == 0:
-#            del self.match_sets[condition]
-#    
-#    def get_local_pivots(self):
-#        return self.match_sets[self.current].matches[self.match_sets[self.current].match2rewrite].local_pivots
-    
+
+    #    def get_curr_matchset(self):
+    #        return self.match_sets[self.current]
+    #
+    #    def get_match2rewrite(self, condition):
+    #        return self.match_sets[condition].matches[self.match_sets[condition].match2rewrite]
+    #
+    #    def get_curr_match2rewrite(self):
+    #        return self.match_sets[self.current].matches[self.match_sets[self.current].match2rewrite]
+    #
+    #    def remove_match2rewrite(self, condition):
+    #        # Remove the match to rewrite
+    #        del self.match_sets[condition].matches[self.match_sets[condition].match2rewrite]
+    #        # If the corresponding match set has become empty, remove it too
+    #        if len(self.match_sets[condition].matches) == 0:
+    #            del self.match_sets[condition]
+    #
+    #    def get_local_pivots(self):
+    #        return self.match_sets[self.current].matches[self.match_sets[self.current].match2rewrite].local_pivots
+
     def clean(self):
         '''
             Unflags dirty matches
@@ -168,18 +168,18 @@ class MatchSet:
     def __init__(self):
         self.match2rewrite = None   # the selected match to be transformed
         self.matches = []           # TODO: should it be a generator?
-# TODO: Should we store all the matches and let the iterator explicitly choose one randomly? Or rely on the matching algorithm and save memory space?
-    
+    # TODO: Should we store all the matches and let the iterator explicitly choose one randomly? Or rely on the matching algorithm and save memory space?
+
     def __str__(self):
         s = '''MatchSet (%s): %s''' % (self.match2rewrite, self.matches)
         return s
-    
+
     def __copy__(self):
         cpy = MatchSet()
         cpy.match2rewrite = self.match2rewrite
         cpy.matches = [copy.copy(match) for match in self.matches]
         return cpy
-    
+
     def __deepcopy__(self, memo):
         cpy = MatchSet()
         cpy.match2rewrite = self.match2rewrite
@@ -196,23 +196,23 @@ class Match(dict):
     def __init__(self):
         super(Match, self).__init__()   # {pattern node label : source node guid}
         self.local_pivots = Pivots()    # {pivot name : source node guid}
-    
+
     def __copy__(self):
         cpy = copy.copy(super(Match, self))
         cpy.local_pivots = copy.copy(self.local_pivots)
         return cpy
-    
+
     def __deepcopy__(self, memo):
         cpy = copy.deepcopy(super(Match, self))
         cpy.local_pivots = copy.deepcopy(self.local_pivots)
         return cpy
-    
+
     def is_dirty(self, packet):
         '''
             Determines whether a source model element is dirty.
             @param packet: The packet on which the mappings are bound.
         '''
-        for v in self.itervalues():
+        for v in self.values():
             node = packet.graph.get_node(v)
             node = packet.graph.vs[node]
             if node is not None:
@@ -223,20 +223,20 @@ class Match(dict):
                 # It was deleted
                 return True
         return False
-    
+
     def clean(self, packet):
-        for v in self.itervalues():
+        for v in self.values():
             node = packet.graph.get_node(v)
             node = packet.graph.vs[node]
             if node and Himesis.Constants.MT_DIRTY in node.attribute_names():
                 node[Himesis.Constants.MT_DIRTY] = False
-    
+
     def to_label_mapping(self, source_graph):
         '''
             Converts the match to a mapping dictionary {label: source node index}.
         '''
         mapping = {}
-        for label in self.iterkeys():
+        for label in self.keys():
             try:
                 sourceNode = source_graph.get_node(self[label])
             except KeyError:
@@ -246,19 +246,19 @@ class Match(dict):
             else:
                 raise Exception('The matched node %s does not exist' % label)
         return mapping
-    
+
     def to_mapping(self, source_graph, pattern_graph):
         '''
             Converts the match to a mapping dictionary {pattern node index: source node index}.
         '''
         mapping = {}
-        for label in self.iterkeys():
+        for label in self.keys():
             patternNode = pattern_graph.get_node_with_label(label)
             if patternNode is not None:
                 sourceNode = source_graph.get_node(self[label])
                 mapping[patternNode] = sourceNode
         return mapping
-    
+
     def from_mapping(self, mapping, source_graph, pattern_graph):
         '''
             Extracts all matches from a mapping dictionary {pattern node index: source node index}
@@ -274,7 +274,7 @@ class Match(dict):
                 label = pattern_graph.vs[pattern_node][Himesis.Constants.MT_LABEL]
                 guid = source_graph.vs[mapping[pattern_node]][Himesis.Constants.GUID]
                 self[label] = guid
-        
+
         self.local_pivots.from_mapping(mapping, source_graph, pattern_graph)
 
 
@@ -286,41 +286,41 @@ class Pivots(dict):
     def __init__(self):
         super(Pivots, self).__init__()     # {pivot name : source node guid}
         self.has_source_node_indices = False
-    
+
     def __copy__(self):
         cpy = copy.copy(super(Pivots, self))
         cpy.has_source_node_indices = self.has_source_node_indices
         return cpy
-    
+
     def __deepcopy__(self, memo):
         cpy = copy.deepcopy(super(Pivots, self))
         cpy.has_source_node_indices = self.has_source_node_indices
         return cpy
-    
+
     def to_source_node_indices(self, source_graph):
-        for p in self.iterkeys():
+        for p in self.keys():
             sourceNode = source_graph.get_node(self[p])
             self[p] = sourceNode
         self.has_source_node_indices = True
-    
+
     def to_mapping(self, source_graph, pattern_graph):
         '''
             Converts the pivots to a mapping dictionary {pattern node index: source node index}.
         '''
         mapping = {}
         if not self.has_source_node_indices:
-            for p in self.iterkeys():
+            for p in self.keys():
                 patternNode = pattern_graph.get_pivot_in(p)
                 if patternNode is not None:
                     sourceNode = source_graph.get_node(self[p])
                     mapping[patternNode] = sourceNode
         else:
-            for p in self.iterkeys():
+            for p in self.keys():
                 patternNode = pattern_graph.get_pivot_in(p)
                 if patternNode is not None:
                     mapping[patternNode] = self[p]
         return mapping
-    
+
     def from_mapping(self, mapping, source_graph, pattern_graph):
         '''
             Extracts all pivots from a mapping dictionary {pattern node index: source node index}

+ 3 - 3
mt/ptcal/pytcore/tcore/primitive.py

@@ -10,11 +10,11 @@ class Primitive(object):
     def __init__(self):
         self.is_success = False      # flags whether the primitive's action resulted in a success or not
         self.exception = None       # holds the exception object if one was raised
-        self._id = uuid.uuid4() 
-    
+        self._id = uuid.uuid4()
+
     def cancelIn(self, cancel):
         self.is_success = False
         self.exception = None
-    
+
     def __str__(self):
         return '%s %s' % (str(self.__class__.__name__), self._id) 

+ 5 - 5
mt/ptcal/pytcore/tcore/resolver.py

@@ -2,8 +2,8 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-from rule_primitive import RulePrimitive
-from messages import TransformationException
+from .rule_primitive import RulePrimitive
+from .messages import TransformationException
 
 
 class Resolver(RulePrimitive):
@@ -21,7 +21,7 @@ class Resolver(RulePrimitive):
         super(Resolver, self).__init__()
         self.external_matches_only = external_matches_only
         self.custom_resolution = custom_resolution
-    
+
     def packet_in(self, packet):
         '''
             Attempts to merge the packets into a single one, only if all threads had succeeded.
@@ -47,13 +47,13 @@ class Resolver(RulePrimitive):
         # No conflicts are to be reported
         self.is_success = True
         return packet
-    
+
     def _custom_resolution(self, packet, match):
         '''
             Applies the user-defined resolution function
         '''
         return self.custom_resolution(packet)
-    
+
     def _default_resolution(self, packet, match):
         '''
             Attempts to resolve conservatively any conflicts

+ 12 - 9
mt/ptcal/pytcore/tcore/rewriter.py

@@ -2,12 +2,15 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-from rule_primitive import RulePrimitive
-from messages import TransformationException
+from .rule_primitive import RulePrimitive
+from .messages import TransformationException
 from ..core.himesis import Himesis
 from ...tconstants import TConstants as TC
 from ...utils import Utilities as utils
 
+import sys
+if sys.version_info[0] >= 3:
+    from functools import reduce
 
 class Rewriter(RulePrimitive):
     '''
@@ -20,15 +23,15 @@ class Rewriter(RulePrimitive):
         '''
         super(Rewriter, self).__init__()
         self.condition = condition
-        
+
         self.sendAndApplyDeltaFunc = sendAndApplyDeltaFunc
-    
+
     def __str__(self):
         s = super(Rewriter, self).__str__()
         s = s.split(' ')
         s.insert(1, '[%s]' % self.condition.name)
         return reduce(lambda x, y: x + ' ' + y, s)
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -44,20 +47,20 @@ class Rewriter(RulePrimitive):
             # Apply the transformation on the match
             try:
                 self.condition.execute(packet, mapping)     # Sets dirty nodes as well
-            except Exception, e:
+            except Exception as e:
                 self.is_success = False
                 self.exception = TransformationException(e)
                 self.exception.packet = packet
                 self.exception.transformation_unit = self
                 return packet
-            
+
             # Remove the match
             packet.match_sets[self.condition.pre[Himesis.Constants.GUID]].match2rewrite = None
             if  len(packet.match_sets[self.condition.pre[Himesis.Constants.GUID]].matches) == 0:
                 del packet.match_sets[self.condition.pre[Himesis.Constants.GUID]]
-            
+
             #print self.condition
-            
+
             ''' hergin :: motif-integration :: start '''
             self.sendAndApplyDeltaFunc(packet.deltas)
             ''' hergin :: motif-integration :: end '''

+ 21 - 21
mt/ptcal/pytcore/tcore/rollbacker.py

@@ -4,7 +4,7 @@ See COPYING.lesser and README.md in the root of this project for full details'''
 
 #import pickle, os
 from ..util.infinity import INFINITY
-from iterator import Iterator
+from .iterator import Iterator
 from ..tcore.messages import TransformationException
 
 
@@ -21,14 +21,14 @@ class Rollbacker(Iterator):
         '''
         super(Rollbacker, self).__init__(condition, max_iterations)
         self.checkpoints = []   # Stack of file names
-    
+
     def packet_in(self, packet):
         self.exception = None
         self.is_success = False
         try:
             self.establish(packet)
             self.is_success = True
-        except Exception, e:
+        except Exception as e:
             self.is_success = False
             self.exception = TransformationException(e)
             self.exception.packet = packet
@@ -36,7 +36,7 @@ class Rollbacker(Iterator):
         finally:
             self.iterations = 1
             return packet
-    
+
     def next_in(self, packet):
         self.exception = None
         self.is_success = False
@@ -50,7 +50,7 @@ class Rollbacker(Iterator):
                 try:
                     packet.set_state(self.restore())
                     self.is_success = True
-                except Exception, e:
+                except Exception as e:
                     self.is_success = False
                     self.exception = TransformationException(e)
                     self.exception.packet = packet
@@ -65,31 +65,31 @@ class Rollbacker(Iterator):
                 self.is_success = False
             finally:
                 return packet
-    
+
     def establish(self, packet):
-#        fileName = '%d.tc_state.%d' % (self._id, len(self.checkpoints))
-#        with open(fileName, 'w') as storage:
-#            pickle.dump(packet, storage)
-#        self.checkpoints.append(fileName)
+        #        fileName = '%d.tc_state.%d' % (self._id, len(self.checkpoints))
+        #        with open(fileName, 'w') as storage:
+        #            pickle.dump(packet, storage)
+        #        self.checkpoints.append(fileName)
         self.checkpoints.append(packet.copy_state(self.condition))
-            
-    
+
+
     def restore(self):
-#        with open(self.checkpoints[-1], 'r') as storage:
-#            packet = pickle.load(storage)
-#            return packet
-#        os.remove(self.checkpoints[-1])
+        #        with open(self.checkpoints[-1], 'r') as storage:
+        #            packet = pickle.load(storage)
+        #            return packet
+        #        os.remove(self.checkpoints[-1])
         if len(self.checkpoints) > 0:
             return self.checkpoints.pop()
         raise Exception('There are no checkpoints to restore')
-    
+
     def discard(self):
-#        os.remove(self.checkpoints[-1])
+        #        os.remove(self.checkpoints[-1])
         if len(self.checkpoints) > 0:
             del self.checkpoints[-1]
         raise Exception('There are no checkpoints to discard')
-    
+
     def discard_all(self):
-#        for fn in self.checkpoints:
-#            os.remove(fn)
+        #        for fn in self.checkpoints:
+        #            os.remove(fn)
         self.checkpoints = []

+ 2 - 2
mt/ptcal/pytcore/tcore/rule_primitive.py

@@ -2,12 +2,12 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-from primitive import Primitive
+from .primitive import Primitive
 
 # Abstract class
 class RulePrimitive(Primitive):
     def __init__(self):
         super(RulePrimitive, self).__init__()
-    
+
     def packet_in(self, packet):
         raise AttributeError('Method not implemented')

+ 7 - 7
mt/ptcal/pytcore/tcore/selector.py

@@ -3,8 +3,8 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.seeded_random import Random
-from control_primitive import ControlPrimitive
-from messages import Cancel, TransformationException, NIL_PACKET
+from .control_primitive import ControlPrimitive
+from .messages import Cancel, TransformationException, NIL_PACKET
 
 
 class Selector(ControlPrimitive):
@@ -17,7 +17,7 @@ class Selector(ControlPrimitive):
         '''
         super(Selector, self).__init__()
         self.exclusions = []
-    
+
     def success_in(self, packet):
         '''
             Receives a successful packet
@@ -25,7 +25,7 @@ class Selector(ControlPrimitive):
         self.exception = None
         self.is_success = False
         self.success.append(packet)
-    
+
     def fail_in(self, packet):
         '''
             Receives a failed packet
@@ -33,11 +33,11 @@ class Selector(ControlPrimitive):
         self.exception = None
         self.is_success = False
         self.fail.append(packet)
-    
+
     def reset(self):
         super(Selector, self).reset()
         self.exclusions = []
-    
+
     def select(self):
         '''
             Selects a packet randomly from the success list.
@@ -59,7 +59,7 @@ class Selector(ControlPrimitive):
             self.exception = TransformationException('No packet was received')
             self.exception.packet = NIL_PACKET
             return NIL_PACKET
-    
+
     def cancel(self):
         '''
             Produces a cancel event and resets its state

+ 10 - 10
mt/ptcal/pytcore/tcore/synchronizer.py

@@ -3,8 +3,8 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 from ..util.seeded_random import Random
-from control_primitive import ControlPrimitive
-from messages import TransformationException, NIL_PACKET
+from .control_primitive import ControlPrimitive
+from .messages import TransformationException, NIL_PACKET
 
 
 class Synchronizer(ControlPrimitive):
@@ -20,11 +20,11 @@ class Synchronizer(ControlPrimitive):
                                 By default, this returns None.
         '''
         super(Synchronizer, self)
-        
+
         assert(threads >= 2)
         self.threads = threads
         self.custom_merge = custom_merge
-    
+
     def success_in(self, packet):
         '''
             Receives a successful packet
@@ -32,7 +32,7 @@ class Synchronizer(ControlPrimitive):
         self.exception = None
         self.is_success = False
         self.success.append(packet)
-    
+
     def fail_in(self, packet):
         '''
             Receives a failed packet
@@ -40,32 +40,32 @@ class Synchronizer(ControlPrimitive):
         self.exception = None
         self.is_success = False
         self.fail.append(packet)
-    
+
     def _custom_merge(self):
         '''
             Applies the user-defined merge function
         '''
         return self.custom_merge(self.success)
-    
+
     def _default_merge(self):
         '''
             Attempts to merge the packets conservatively
         '''
         return None
-    
+
     def merge(self):
         '''
             Attempts to merge the packets into a single one, only if all threads had succeeded.
         '''
         self.exception = None
         self.is_success = False
-        
+
         def failure():
             self.is_success = False
             self.exception = TransformationException()
             self.exception.packet = NIL_PACKET
             return NIL_PACKET
-        
+
         if len(self.success) == self.threads:
             packet = self._custom_merge()
             if packet is not None:

+ 11 - 11
mt/ptcal/pytcore/util/infinity.py

@@ -27,7 +27,7 @@ class Infty(object):
     __instantiated = False
     def __init__(self):
         if self.__instantiated:
-            raise NotImplementedError, "singleton class already instantiated"
+            raise NotImplementedError("singleton class already instantiated")
         self.__instantiatiated = True
 
     def __deepcopy__(self, memo):
@@ -40,7 +40,7 @@ class Infty(object):
     def __sub__(self, other):
         """ INFINITY - x = INFINITY (if x != INF), or NaN (if x == INFINITY) """
         if other == self:
-            raise ValueError, "INFINITY - INFINITY gives NaN (not defined)"
+            raise ValueError("INFINITY - INFINITY gives NaN (not defined)")
         return self
 
     def __mul__(self, other):
@@ -54,8 +54,8 @@ class Infty(object):
     def __rsub__(self, other):
         """ x - INFINITY = -INFINITY (if x != INFINITY), or NaN (if x == INFINITY) """
         if other == self:
-            raise ValueError, "INFINITY - INFINITY gives NaN (not defined)"
-        raise ValueError, "x - INFINITY gives MINUS_INFINITY (not defined)"
+            raise ValueError("INFINITY - INFINITY gives NaN (not defined)")
+        raise ValueError("x - INFINITY gives MINUS_INFINITY (not defined)")
 
     def __rmul__(self, other):
         """ x * INFINITY = INFINITY """
@@ -65,11 +65,11 @@ class Infty(object):
         """ abs(INFINITY) = INFINITY -- absolute value """
         return self
 
-#    def __cmp__(self, other):
-#        if other is self:
-#            return 0
-#        else:
-#            return 1
+    #    def __cmp__(self, other):
+    #        if other is self:
+    #            return 0
+    #        else:
+    #            return 1
 
     def __eq__(self, other):
         if other is self:
@@ -81,7 +81,7 @@ class Infty(object):
         if other is self:
             return False
         else:
-            return True 
+            return True
 
     def __lt__(self, other):
         return False
@@ -96,7 +96,7 @@ class Infty(object):
         if other is self:
             return False
         else:
-            return True 
+            return True
 
     def __ge__(self, other):
         return True

+ 2 - 2
mt/ptcal/pytcore/util/seeded_random.py

@@ -14,8 +14,8 @@ class SeededRandom(random.Random):
             Singleton class: the single instance "INFINITY" stands for infinity.
         '''
         if SeededRandom.__instantiated:
-            raise NotImplementedError, "singleton class already instantiated"
-        
+            raise NotImplementedError("singleton class already instantiated")
+
         SeededRandom.__instantiatiated = True
         random.Random.__init__(self)
         self.seed(seed)

+ 46 - 42
mt/ptcal/synchgraph.py

@@ -2,10 +2,14 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-from petrinet import *
+from .petrinet import *
 from threading import *
 import igraph as ig
-import Queue as q
+import sys
+if sys.version_info[0] < 3:
+  from Queue import *
+else:
+  from queue import *
 from random import choice
 
 
@@ -18,10 +22,10 @@ class synchgraph:
     self.sg.add_vertices(1)
     self.sg.vs[0]['M'] = M0
     self.last = M0
-  
+
   def summary(self):
     ig.summary(self.sg)
-  
+
   def statePresent(self, M):
     for v in self.sg.vs:
       #for key,value in v['M']:
@@ -29,7 +33,7 @@ class synchgraph:
         #print 'marking present in synchgraph'
         return v.index
     return -1
-  
+
   def statePresentReach(self, M):
     for v in self.sg.vs:
       #for key,value in v['M']:
@@ -37,19 +41,19 @@ class synchgraph:
         #print 'marking present in synchgraph'
         return v.index
     return -1
-  
+
   def addMarkingBatch(self,T,from_prod,to_prod):
     new = None
     for i in range(len(from_prod[0])):
-       new = self.addMarking(from_prod[0][i],to_prod[0][i],T,self.last)
+      new = self.addMarking(from_prod[0][i],to_prod[0][i],T,self.last)
     self.last = new
     #self.graph(synchgraph.id);
     #synchgraph.id+=1
-  
+
   def addMarking(self,Ms,Mnew,Arc,last):
     fr = self.statePresent(last)
     to = self.statePresent(Mnew)
-    
+
     #self.last = Mnew
     if not to == -1:
       self.sg.add_edges([(fr,to)])
@@ -61,7 +65,7 @@ class synchgraph:
       self.sg.add_edges([(fr,to)])
       self.sg.es[self.sg.get_eid(fr, to)]['T'] = '%s,%s'%(Ms,Arc)
     return Mnew
-  
+
   def markSCC(self, modules):
     for v in self.sg.vs:
       newval = []
@@ -70,10 +74,10 @@ class synchgraph:
         new = '%s-%d'%(ls[0],modules[ls[0]].getSCCvid(ls[1]))
         newval.append(new)
       v['SM'] = newval
-      
-  
+
+
   def graph(self,id=None):
-    key = choice(range(20))
+    key = choice(list(range(20)))
     vattr=''
     eattr = ''
     nodes = {}
@@ -81,37 +85,37 @@ class synchgraph:
     dateTag = datetime.datetime.now().strftime("%Y-%b-%d_%H-%M-%S")
     for v in self.sg.vs:
       #sort(v['M'])
-     # vattr +='('
-#      i = len(v['M'])
-#      leng = i
-#      j=0
+      # vattr +='('
+      #      i = len(v['M'])
+      #      leng = i
+      #      j=0
       if 'SM' in self.sg.vs.attribute_names():
         vattr+='ssc\n';
         for value in v['SM']:
-  #        if leng == 1:
-  #         if 'SCC' in self.sg.vs.attribute_names():
-  #            vattr +='SCC-%s\n'%v['SCC']
-  #         vattr = 'fstate%d'%choice(range(100))
-  #        else:
-            #if int(value) > 0:
-  #          if 'SCC' in self.sg.vs.attribute_names():
-  #            vattr +='SCC-%s\n'%v['SCC']
-            vattr += '%s'%(value.capitalize())
-      else: 
+          #        if leng == 1:
+          #         if 'SCC' in self.sg.vs.attribute_names():
+          #            vattr +='SCC-%s\n'%v['SCC']
+          #         vattr = 'fstate%d'%choice(range(100))
+          #        else:
+          #if int(value) > 0:
+          #          if 'SCC' in self.sg.vs.attribute_names():
+          #            vattr +='SCC-%s\n'%v['SCC']
+          vattr += '%s'%(value.capitalize())
+      else:
         for value in v['M']:
-  #        if leng == 1:
-  #         if 'SCC' in self.sg.vs.attribute_names():
-  #            vattr +='SCC-%s\n'%v['SCC']
-  #         vattr = 'fstate%d'%choice(range(100))
-  #        else:
-            #if int(value) > 0:
-  #          if 'SCC' in self.sg.vs.attribute_names():
-  #            vattr +='SCC-%s\n'%v['SCC']
-            vattr += '%s'%(value.capitalize())
-#        if not i-1 == 0:
-#          pass#vattr+=','
-       # i -=1
-        #j+=1
+          #        if leng == 1:
+          #         if 'SCC' in self.sg.vs.attribute_names():
+          #            vattr +='SCC-%s\n'%v['SCC']
+          #         vattr = 'fstate%d'%choice(range(100))
+          #        else:
+          #if int(value) > 0:
+          #          if 'SCC' in self.sg.vs.attribute_names():
+          #            vattr +='SCC-%s\n'%v['SCC']
+          vattr += '%s'%(value.capitalize())
+      #        if not i-1 == 0:
+      #          pass#vattr+=','
+      # i -=1
+      #j+=1
       #vattr +=')'
       nodes[v.index] = pydot.Node(vattr)
       graph.add_node(nodes[v.index])
@@ -122,10 +126,10 @@ class synchgraph:
       graph.add_edge(pydot.Edge(nodes[e.source],nodes[e.target],label=e['T']))
     #graph.write_svg('graphs/STATE%s%d%s.svg'%(self.key,choice(range(100)),dateTag))
     if id == None:
-      graph.write_svg('../graphs/SYNCH%s%d%s.svg'%(key,choice(range(100)),dateTag))
+      graph.write_svg('../graphs/SYNCH%s%d%s.svg'%(key,choice(list(range(100))),dateTag))
     else:
       graph.write_svg('../graphs/SYNCH%d.svg'%(id))
-  
+
 #  def process(self,packet):
 #    if packet.ismarking():
 #      self.marking[packet.key()] = packet.payload() 

+ 11 - 11
mt/ptcal/tconstants.py

@@ -7,22 +7,22 @@ class TConstants :
 	NOT_APPLICABLE = 'NotApplicable'
 	SUCCEEDED 		= 'Success'
 	FAILED			= 'Failure'
-	
+
 	''' hergin :: motif-integration :: start '''
 	EXCEPTION			= 'Exception'
 	RULE_EXCEPTION_MSG				= 'EXCEPTION :: rule exception on '
-	
+
 	MODE_DEBUG = 'debug'
 	MODE_RELEASE = 'release'
-	
+
 	''' hergin :: motif-integration :: end '''
 
 	#inter-rule delay in PLAY mode
 	INTER_RULE_DELAY 		= 0.05
-	
+
 	#the delay between verifications that all changelogs pertaining to the last executed rule have been handled
 	WAIT_ON_CHLOG_DELAY	= 0.02
-	
+
 	#console output for various rule/transformation completion cases
 	RULE_SUCCESS_MSG				= 'rule succeeded'
 	''' hergin :: motif-integration :: modify fail message '''
@@ -36,19 +36,19 @@ class TConstants :
 	#console output for various debugging messages
 	DEBUGGING_ON		= 'transformation debugging has been enabled'
 	DEBUGGING_OFF		= 'transformation debugging has been disabled'
-	DEBUGGING_HALT		= 'WARNING :: popping up transformation debugging window,'+\
-							  ' resume transformation with "play" or "step" buttons'+\
-							  ' from current window'
+	DEBUGGING_HALT		= 'WARNING :: popping up transformation debugging window,'+ \
+							' resume transformation with "play" or "step" buttons'+ \
+							' from current window'
 
 	#supported designer code languages
 	JAVASCRIPT	= 'JAVASCRIPT'
 	PYTHON		= 'PYTHON'
 
 	#metamodel paths
-	RULEMM	= '/Formalisms/__Transformations__/TransformationRule/'+\
-					  'TransformationRule'
+	RULEMM	= '/Formalisms/__Transformations__/TransformationRule/'+ \
+				'TransformationRule'
 	TRANSFMM	= '/Formalisms/__Transformations__/Transformation/Transformation'
-	
+
 	''' hergin :: motif-integration '''
 	MOTIFMM	= '/Formalisms/__Transformations__/Transformation/MoTif'
 	TCOREMM = '/Formalisms/__Transformations__/Transformation/T-Core'

+ 28 - 28
mt/ptcal/tcontext.py

@@ -3,8 +3,8 @@ Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
 import random
-from tconstants import TConstants as TC
-from utils import Utilities as utils
+from .tconstants import TConstants as TC
+from .utils import Utilities as utils
 
 
 '''
@@ -45,13 +45,13 @@ class TransformationContext(object) :
 		future reference '''
 	def getRuntimeConfiguration(self) :
 		if self._rconfig == None :
-			mm = '/Formalisms/__Transformations__/Transformation/Transformation/'		
+			mm = '/Formalisms/__Transformations__/Transformation/Transformation/'
 			for id in self.t['nodes'] :
 				if self.t['nodes'][id]['$type'] == mm+'RuntimeConfiguration' :
 					self._rconfig = self.t['nodes'][id]['options']['value']
 			self._rconfig = (self._rconfig or {})
 		return self._rconfig
-			
+
 
 	'''
 		return true if no step has run yet (1st condition), or if the last step's
@@ -59,7 +59,7 @@ class TransformationContext(object) :
 	 	has terminated) feedbackReceived flag is set '''
 	def isLastStepFeedbackReceived(self) :
 		return (not self._expired and self._lastStep == {}) or \
-				 'feedbackReceived' in self._lastStep
+			   'feedbackReceived' in self._lastStep
 
 
 	''' 
@@ -80,20 +80,20 @@ class TransformationContext(object) :
 		a) N/A, b) applicable and succeeded or c) applicable and failed '''
 	def setLastStepApplicationInfo(self,applicationInfo) :
 		raise NotImplementedError('implement in subclass')
-		
+
 	'''
 		set the feedbackReceived flag of the last step to true (i.e., indicate 
 		that all relevant asworker changelogs have been received and handled '''
 	def setLastStepFeedbackReceived(self) :
 		self._lastStep['feedbackReceived'] = True
-	
+
 	'''
 		add 'a' amount of time to total execution time '''
 	def setLastStepExecTime(self,a):
 		raise NotImplementedError('implement in subclass')
 
-	
-			
+
+
 '''
 	holds the execution context of a 'Transformation' construct 
 	
@@ -112,7 +112,7 @@ class ModelTransformationContext(TransformationContext) :
 	def getCurrentStepId(self) :
 		if self._lastStep == {} :
 			assert False, \
-				 "this function shouldn't be called when there is no current step"
+				"this function shouldn't be called when there is no current step"
 		else :
 			return self._lastStep['id']
 
@@ -123,12 +123,12 @@ class ModelTransformationContext(TransformationContext) :
 	def _getInitialStep(self) :
 		for id in self.t['nodes'] :
 			if 'isStart' in self.t['nodes'][id] and \
-				self.t['nodes'][id]['isStart']['value'] :
-					if 'filename' in self.t['nodes'][id] :
-						return {'fname':self.t['nodes'][id]['filename']['value'],
-								  'id':id}
-					else :
-						return {'id':id}
+					self.t['nodes'][id]['isStart']['value'] :
+				if 'filename' in self.t['nodes'][id] :
+					return {'fname':self.t['nodes'][id]['filename']['value'],
+							'id':id}
+				else :
+					return {'id':id}
 
 
 	'''
@@ -157,12 +157,12 @@ class ModelTransformationContext(TransformationContext) :
 			return ns
 		else :
 			mm = '/Formalisms/__Transformations__/Transformation/Transformation/'
-			def f(e) : 
+			def f(e) :
 				return e['src'] == self._lastStep['id'] and \
-						 self.t['nodes'][e['dest']]['$type'] == \
-							mm+'On'+self._lastStep['applicationInfo']
+					   self.t['nodes'][e['dest']]['$type'] == \
+					   mm+'On'+self._lastStep['applicationInfo']
 
-			ne = filter(f,self.t['edges'])
+			ne = list(filter(f,self.t['edges']))
 			if len(ne) == 0 :
 				ai = self._applicationInfo()
 				self._lastStep = {}
@@ -179,9 +179,9 @@ class ModelTransformationContext(TransformationContext) :
 						else :
 							self._lastStep = {'id':e['dest']}
 						return self._lastStep
-				raise ValueError('invalid transformation model, dangling '+\
-									  'On'+self._lastStep['applicationInfo']+' edge')
-		
+				raise ValueError('invalid transformation model, dangling '+ \
+								 'On'+self._lastStep['applicationInfo']+' edge')
+
 
 	'''
 		set the application information of the last step '''
@@ -189,11 +189,11 @@ class ModelTransformationContext(TransformationContext) :
 		if applicationInfo == TC.SUCCEEDED :
 			self._notApplicable = False
 		self._lastStep['applicationInfo'] = applicationInfo
-		
+
 	def setLastStepExecTime(self,a):
 		''' to be implemented '''
 		pass
-				
+
 
 
 
@@ -226,10 +226,10 @@ class ExhaustContext(TransformationContext) :
 		2. if this is an ExhaustRandom, randomly choose a not-N/A step and set
 	  		it as self._lastStep
 		2. if this is an Exhaust, increment self._lastStep
-		3. return self._lastStep '''			
+		3. return self._lastStep '''
 	def nextStep(self) :
 		steps = self.t['nodes'][self._id]['filenames']['value']
-		
+
 		if self._expired == True :
 			raise RuntimeError('can not step in expired mtContext')
 		elif len(steps) == 0 :
@@ -259,7 +259,7 @@ class ExhaustContext(TransformationContext) :
 			   indicates whether a rule was succesfully applied within this 
 				transformation context... we also reset self._NAs to indicate
 				that each step should be run at least once to re-establish its
-				(non-)applicability '''			
+				(non-)applicability '''
 	def setLastStepApplicationInfo(self,applicationInfo) :
 		if applicationInfo == TC.SUCCEEDED :
 			self._notApplicable = False

+ 52 - 52
mt/ptcal/tcorecontext.py

@@ -9,19 +9,19 @@ Purpose: Tcore integration to AtomPM
 '''
 
 import random
-from tconstants import TConstants as TC
-from utils import Utilities as utils
-from tcontext import TransformationContext
-from pytcore.tcore.messages import Pivots
-
-from pytcore.tcore.matcher import Matcher
-from pytcore.tcore.iterator import Iterator
-from pytcore.tcore.rewriter import Rewriter
-			
+from .tconstants import TConstants as TC
+from .utils import Utilities as utils
+from .tcontext import TransformationContext
+from .pytcore.tcore.messages import Pivots
+
+from .pytcore.tcore.matcher import Matcher
+from .pytcore.tcore.iterator import Iterator
+from .pytcore.tcore.rewriter import Rewriter
+
 '''
 	holds the execution context of a 'Transformation' construct 
 	fname					the filename of the transformation model '''
-	
+
 class TCoreContext(TransformationContext) :
 	def __init__(self,fname,ptcal) :
 		super(TCoreContext,self).__init__(ptcal._transfData[fname])
@@ -31,54 +31,54 @@ class TCoreContext(TransformationContext) :
 		self.pivots = Pivots()
 		self.sendAndApplyDeltaFunc=ptcal.sendAndApplyDelta
 		self.nextInput = "packetIn"
-		
+
 		self.compiler = ptcal._compiler
 		self.rules = {}
-		
+
 		self.startPacketInID = None
 		self.startNextInID = None
 		self.startCancelInID = None
-		
+
 		for id in self.t['nodes']:
-			
+
 			if self.t['nodes'][id]['$type'] == self.metamodel+"/Matcher":
 				max = self.t['nodes'][id]['max']['value']
 				ruleName = self.t['nodes'][id]['pattern']['value']
 				compiledRule = self.compiler.compileRule(None,ruleName)
 				matcher = Matcher(condition=compiledRule['lhs'],max=max)
 				self.rules[id] = {'id':id,
-							'name':self.t['nodes'][id]['name']['value'],
-							'alias':self.t['nodes'][id]['alias']['value'],
-							'rule':matcher}
-				
+								  'name':self.t['nodes'][id]['name']['value'],
+								  'alias':self.t['nodes'][id]['alias']['value'],
+								  'rule':matcher}
+
 			elif self.t['nodes'][id]['$type'] == self.metamodel+"/Rewriter":
 				ruleName = self.t['nodes'][id]['pattern']['value']
 				compiledRule = self.compiler.compileRule(None,ruleName)
 				rewriter = Rewriter(condition=compiledRule['rhs'],sendAndApplyDeltaFunc=self.sendAndApplyDeltaFunc)
 				self.rules[id] = {'id':id,
-							'name':self.t['nodes'][id]['name']['value'],
-							'alias':self.t['nodes'][id]['alias']['value'],
-							'rule':rewriter}
-				
+								  'name':self.t['nodes'][id]['name']['value'],
+								  'alias':self.t['nodes'][id]['alias']['value'],
+								  'rule':rewriter}
+
 			elif self.t['nodes'][id]['$type'] == self.metamodel+"/Iterator":
 				maxIterations = self.t['nodes'][id]['maxIterations']['value']
 				ruleName = self.t['nodes'][id]['pattern']['value']
 				compiledRule = self.compiler.compileRule(None,ruleName)
 				iterator = Iterator(condition=compiledRule['lhs'])
 				self.rules[id] = {'id':id,
-							'name':self.t['nodes'][id]['name']['value'],
-							'alias':self.t['nodes'][id]['alias']['value'],
-							'rule':iterator}
-				
+								  'name':self.t['nodes'][id]['name']['value'],
+								  'alias':self.t['nodes'][id]['alias']['value'],
+								  'rule':iterator}
+
 			elif self.t['nodes'][id]['$type'] == self.metamodel+"/StartPacketIn":
 				self.startPacketInID = id
 			elif self.t['nodes'][id]['$type'] == self.metamodel+"/StartNextIn":
 				self.startNextInID = id
 			elif self.t['nodes'][id]['$type'] == self.metamodel+"/StartCancelIn":
 				self.startCancelInID = id
-				
+
 		''' TODO add other rules '''
-				
+
 	def setLastStepExecTime(self,a):
 		self._lastStep['time'] = a
 		self.totalExecutionTime += a
@@ -88,7 +88,7 @@ class TCoreContext(TransformationContext) :
 	def getCurrentStepId(self) :
 		if self._lastStep == {} :
 			assert False, \
-				 "this function shouldn't be called when there is no current step"
+				"this function shouldn't be called when there is no current step"
 		else :
 			return self._lastStep['id']
 
@@ -96,19 +96,19 @@ class TCoreContext(TransformationContext) :
 		Returns the initial step of transformation which is the step after start state
 	'''
 	def _getInitialStep(self) :
-		
+
 		if self.startPacketInID == None and self.startNextInID == None and self.startCancelInID == None:
 			raise RuntimeError('There is no start state in loaded TCore instance!')
 
 		''' a regular Tcore trafo starts with an startPacketIn node '''
 		if self.startPacketInID != None:
-			startStateEdges = filter(lambda e:e['src']==self.startPacketInID,self.t['edges'])
-			
+			startStateEdges = [e for e in self.t['edges'] if e['src']==self.startPacketInID]
+
 			if len(startStateEdges) == 0 :
 				raise RuntimeError('StartPacketIn is not connected to any other rule!')
 			else:
-				initialStepID=filter(lambda e:e['src']==startStateEdges[0]['dest'],self.t['edges'])[0]['dest']
-				
+				initialStepID=[e for e in self.t['edges'] if e['src']==startStateEdges[0]['dest']][0]['dest']
+
 				return self.rules[initialStepID]
 
 	'''
@@ -149,19 +149,19 @@ class TCoreContext(TransformationContext) :
 			self._lastStep = ns
 			return ns
 		else :
-			
+
 			def f(e) :
 				return e['src'] == self._lastStep['id']
-			
-			edgesFromLastStep = filter(f,self.t['edges'])
-			
+
+			edgesFromLastStep = list(filter(f,self.t['edges']))
+
 			if len(edgesFromLastStep) == 0 :
 				ai = self._applicationInfo()
 				self._lastStep = {}
 				self._expired = True
 				return ai
 			else :
-				
+
 				targetLinkID=None
 				resString = None
 				if self._lastStep['applicationInfo'] == TC.SUCCEEDED :
@@ -170,7 +170,7 @@ class TCoreContext(TransformationContext) :
 					resString = "fail"
 				else: #exception
 					resString = "exception"
-				
+
 				for edgeLS in edgesFromLastStep:
 					if 'output' in self.t['nodes'][edgeLS['dest']] and self.t['nodes'][edgeLS['dest']]['output']['value'] == resString:
 						targetLinkID=edgeLS['dest']
@@ -188,29 +188,29 @@ class TCoreContext(TransformationContext) :
 
 				if 'input' in self.t['nodes'][targetLinkID]:
 					self.nextInput = self.t['nodes'][targetLinkID]['input']['value']
-					
-				def f(e) : 
+
+				def f(e) :
 					return e['src'] == targetLinkID
-				nodesAfterLastStep = filter(f,self.t['edges'])
-				
+				nodesAfterLastStep = list(filter(f,self.t['edges']))
+
 				nextStepID = nodesAfterLastStep[0]['dest']
-				
+
 				if nextStepID in self.rules:
 					self._lastStep = self.rules[nextStepID]
 				else:
 					if self.t['nodes'][nextStepID]['$type']==self.metamodel+"/EndSuccess":
 						self._lastStep = {'trafoResult':TC.SUCCEEDED,
-										'feedbackReceived':'True'}
+										  'feedbackReceived':'True'}
 					elif self.t['nodes'][nextStepID]['$type']==self.metamodel+"/EndFail":
 						self._lastStep = {'trafoResult':TC.FAILED,
-										'feedbackReceived':'True'}
+										  'feedbackReceived':'True'}
 					elif self.t['nodes'][nextStepID]['$type']==self.metamodel+"/EndException":
 						self._lastStep = {'trafoResult':TC.EXCEPTION,
-										'feedbackReceived':'True'}
-						
+										  'feedbackReceived':'True'}
+
 				return self._lastStep
-				
-		
+
+
 
 	'''
 		set the application information of the last step '''
@@ -222,4 +222,4 @@ class TCoreContext(TransformationContext) :
 
 	def isLastStepFeedbackReceived(self) :
 		return (not self._expired and self._lastStep=={}) or \
-				 'feedbackReceived' in self._lastStep
+			   'feedbackReceived' in self._lastStep

+ 17 - 12
mt/ptcal/utils.py

@@ -1,7 +1,12 @@
 '''This file is part of AToMPM - A Tool for Multi-Paradigm Modelling
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
-import pprint, json, re, math, threading, httplib
+import pprint, json, re, math, threading, sys
+
+if sys.version_info[0] < 3:
+	import httplib as httplib
+else:
+	import http.client as httplib
 
 
 class Utilities :
@@ -10,17 +15,17 @@ class Utilities :
 		do 'callback()' when 'condition' is satisfied, polling every 'delay' 
 		seconds until it is '''
 	@staticmethod
- 	def doWhen(condition, delay, callback) :
-		if	condition() : 
+	def doWhen(condition, delay, callback) :
+		if	condition() :
 			callback()
-		else : 
+		else :
 			t = threading.Timer(delay,Utilities.doWhen,[condition,delay,callback])
 			t.start()
 
 	'''
 		flatten an array of arrays into a single array '''
 	@staticmethod
- 	def flatten(arrays) :
+	def flatten(arrays) :
 		return [item for array in arrays for item in array]
 
 
@@ -29,10 +34,10 @@ class Utilities :
 		parsed contents (or parsed asm, if path describes a *.model file) '''
 	@staticmethod
 	def fread(path,isJson=True,relative=True) :
-		try : 
+		try :
 			if relative :
 				path = './'+path
-				
+
 			f = open(path,'r')
 			contents = f.read()
 			f.close()
@@ -43,7 +48,7 @@ class Utilities :
 					contents = contents['asm']
 
 			return contents
-		except Exception, e :
+		except Exception as e :
 			raise IOError('crashed while reading data :: '+str(e))
 
 
@@ -53,7 +58,7 @@ class Utilities :
 	@staticmethod
 	def getMetamodel(fulltype) :
 		return re.match("(.*)/.*",fulltype).group(1)
-	
+
 
 	'''
 		split a full type of the form '/path/to/metamodel/type' and return 
@@ -89,7 +94,7 @@ class Utilities :
 	def isHttpSuccessCode(statusCode) :
 		return math.floor(statusCode/100.0) == 2
 
-	
+
 	'''
 		pretty-print anything '''
 	@staticmethod
@@ -108,10 +113,10 @@ class Utilities :
 		if len(hg.es) > 0 :
 			print(hg.get_adjacency())
 
-	
+
 	'''
 		same as JavaScript setTimeout... do 'callback()' after 'delay' seconds
-	  	have elapsed '''		
+	  	have elapsed '''
 	@staticmethod
 	def setTimeout(delay, callback, args=[]) :
 		t = threading.Timer(delay,callback,args)

+ 3 - 0
mt/websocket/README

@@ -0,0 +1,3 @@
+websocket-client
+https://github.com/websocket-client/websocket-client/
+version 0.48.0 - May 27th, 2018

+ 29 - 0
mt/websocket/__init__.py

@@ -0,0 +1,29 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA  02110-1335  USA
+
+"""
+from ._abnf import *
+from ._app import WebSocketApp
+from ._core import *
+from ._exceptions import *
+from ._logging import *
+from ._socket import *
+
+__version__ = "0.48.0"

+ 447 - 0
mt/websocket/_abnf.py

@@ -0,0 +1,447 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA  02110-1335  USA
+
+"""
+import array
+import os
+import struct
+
+import six
+
+from ._exceptions import *
+from ._utils import validate_utf8
+from threading import Lock
+
+try:
+    if six.PY3:
+        import numpy
+    else:
+        numpy = None
+except ImportError:
+    numpy = None
+
+try:
+    # If wsaccel is available we use compiled routines to mask data.
+    if not numpy:
+        from wsaccel.xormask import XorMaskerSimple
+
+        def _mask(_m, _d):
+            return XorMaskerSimple(_m).process(_d)
+except ImportError:
+    # wsaccel is not available, we rely on python implementations.
+    def _mask(_m, _d):
+        for i in range(len(_d)):
+            _d[i] ^= _m[i % 4]
+
+        if six.PY3:
+            return _d.tobytes()
+        else:
+            return _d.tostring()
+
+
+__all__ = [
+    'ABNF', 'continuous_frame', 'frame_buffer',
+    'STATUS_NORMAL',
+    'STATUS_GOING_AWAY',
+    'STATUS_PROTOCOL_ERROR',
+    'STATUS_UNSUPPORTED_DATA_TYPE',
+    'STATUS_STATUS_NOT_AVAILABLE',
+    'STATUS_ABNORMAL_CLOSED',
+    'STATUS_INVALID_PAYLOAD',
+    'STATUS_POLICY_VIOLATION',
+    'STATUS_MESSAGE_TOO_BIG',
+    'STATUS_INVALID_EXTENSION',
+    'STATUS_UNEXPECTED_CONDITION',
+    'STATUS_BAD_GATEWAY',
+    'STATUS_TLS_HANDSHAKE_ERROR',
+]
+
+# closing frame status codes.
+STATUS_NORMAL = 1000
+STATUS_GOING_AWAY = 1001
+STATUS_PROTOCOL_ERROR = 1002
+STATUS_UNSUPPORTED_DATA_TYPE = 1003
+STATUS_STATUS_NOT_AVAILABLE = 1005
+STATUS_ABNORMAL_CLOSED = 1006
+STATUS_INVALID_PAYLOAD = 1007
+STATUS_POLICY_VIOLATION = 1008
+STATUS_MESSAGE_TOO_BIG = 1009
+STATUS_INVALID_EXTENSION = 1010
+STATUS_UNEXPECTED_CONDITION = 1011
+STATUS_BAD_GATEWAY = 1014
+STATUS_TLS_HANDSHAKE_ERROR = 1015
+
+VALID_CLOSE_STATUS = (
+    STATUS_NORMAL,
+    STATUS_GOING_AWAY,
+    STATUS_PROTOCOL_ERROR,
+    STATUS_UNSUPPORTED_DATA_TYPE,
+    STATUS_INVALID_PAYLOAD,
+    STATUS_POLICY_VIOLATION,
+    STATUS_MESSAGE_TOO_BIG,
+    STATUS_INVALID_EXTENSION,
+    STATUS_UNEXPECTED_CONDITION,
+    STATUS_BAD_GATEWAY,
+)
+
+
+class ABNF(object):
+    """
+    ABNF frame class.
+    see http://tools.ietf.org/html/rfc5234
+    and http://tools.ietf.org/html/rfc6455#section-5.2
+    """
+
+    # operation code values.
+    OPCODE_CONT = 0x0
+    OPCODE_TEXT = 0x1
+    OPCODE_BINARY = 0x2
+    OPCODE_CLOSE = 0x8
+    OPCODE_PING = 0x9
+    OPCODE_PONG = 0xa
+
+    # available operation code value tuple
+    OPCODES = (OPCODE_CONT, OPCODE_TEXT, OPCODE_BINARY, OPCODE_CLOSE,
+               OPCODE_PING, OPCODE_PONG)
+
+    # opcode human readable string
+    OPCODE_MAP = {
+        OPCODE_CONT: "cont",
+        OPCODE_TEXT: "text",
+        OPCODE_BINARY: "binary",
+        OPCODE_CLOSE: "close",
+        OPCODE_PING: "ping",
+        OPCODE_PONG: "pong"
+    }
+
+    # data length threshold.
+    LENGTH_7 = 0x7e
+    LENGTH_16 = 1 << 16
+    LENGTH_63 = 1 << 63
+
+    def __init__(self, fin=0, rsv1=0, rsv2=0, rsv3=0,
+                 opcode=OPCODE_TEXT, mask=1, data=""):
+        """
+        Constructor for ABNF.
+        please check RFC for arguments.
+        """
+        self.fin = fin
+        self.rsv1 = rsv1
+        self.rsv2 = rsv2
+        self.rsv3 = rsv3
+        self.opcode = opcode
+        self.mask = mask
+        if data is None:
+            data = ""
+        self.data = data
+        self.get_mask_key = os.urandom
+
+    def validate(self, skip_utf8_validation=False):
+        """
+        validate the ABNF frame.
+        skip_utf8_validation: skip utf8 validation.
+        """
+        if self.rsv1 or self.rsv2 or self.rsv3:
+            raise WebSocketProtocolException("rsv is not implemented, yet")
+
+        if self.opcode not in ABNF.OPCODES:
+            raise WebSocketProtocolException("Invalid opcode %r", self.opcode)
+
+        if self.opcode == ABNF.OPCODE_PING and not self.fin:
+            raise WebSocketProtocolException("Invalid ping frame.")
+
+        if self.opcode == ABNF.OPCODE_CLOSE:
+            l = len(self.data)
+            if not l:
+                return
+            if l == 1 or l >= 126:
+                raise WebSocketProtocolException("Invalid close frame.")
+            if l > 2 and not skip_utf8_validation and not validate_utf8(self.data[2:]):
+                raise WebSocketProtocolException("Invalid close frame.")
+
+            code = 256 * \
+                six.byte2int(self.data[0:1]) + six.byte2int(self.data[1:2])
+            if not self._is_valid_close_status(code):
+                raise WebSocketProtocolException("Invalid close opcode.")
+
+    @staticmethod
+    def _is_valid_close_status(code):
+        return code in VALID_CLOSE_STATUS or (3000 <= code < 5000)
+
+    def __str__(self):
+        return "fin=" + str(self.fin) \
+            + " opcode=" + str(self.opcode) \
+            + " data=" + str(self.data)
+
+    @staticmethod
+    def create_frame(data, opcode, fin=1):
+        """
+        create frame to send text, binary and other data.
+
+        data: data to send. This is string value(byte array).
+            if opcode is OPCODE_TEXT and this value is unicode,
+            data value is converted into unicode string, automatically.
+
+        opcode: operation code. please see OPCODE_XXX.
+
+        fin: fin flag. if set to 0, create continue fragmentation.
+        """
+        if opcode == ABNF.OPCODE_TEXT and isinstance(data, six.text_type):
+            data = data.encode("utf-8")
+        # mask must be set if send data from client
+        return ABNF(fin, 0, 0, 0, opcode, 1, data)
+
+    def format(self):
+        """
+        format this object to string(byte array) to send data to server.
+        """
+        if any(x not in (0, 1) for x in [self.fin, self.rsv1, self.rsv2, self.rsv3]):
+            raise ValueError("not 0 or 1")
+        if self.opcode not in ABNF.OPCODES:
+            raise ValueError("Invalid OPCODE")
+        length = len(self.data)
+        if length >= ABNF.LENGTH_63:
+            raise ValueError("data is too long")
+
+        frame_header = chr(self.fin << 7
+                           | self.rsv1 << 6 | self.rsv2 << 5 | self.rsv3 << 4
+                           | self.opcode)
+        if length < ABNF.LENGTH_7:
+            frame_header += chr(self.mask << 7 | length)
+            frame_header = six.b(frame_header)
+        elif length < ABNF.LENGTH_16:
+            frame_header += chr(self.mask << 7 | 0x7e)
+            frame_header = six.b(frame_header)
+            frame_header += struct.pack("!H", length)
+        else:
+            frame_header += chr(self.mask << 7 | 0x7f)
+            frame_header = six.b(frame_header)
+            frame_header += struct.pack("!Q", length)
+
+        if not self.mask:
+            return frame_header + self.data
+        else:
+            mask_key = self.get_mask_key(4)
+            return frame_header + self._get_masked(mask_key)
+
+    def _get_masked(self, mask_key):
+        s = ABNF.mask(mask_key, self.data)
+
+        if isinstance(mask_key, six.text_type):
+            mask_key = mask_key.encode('utf-8')
+
+        return mask_key + s
+
+    @staticmethod
+    def mask(mask_key, data):
+        """
+        mask or unmask data. Just do xor for each byte
+
+        mask_key: 4 byte string(byte).
+
+        data: data to mask/unmask.
+        """
+        if data is None:
+            data = ""
+
+        if isinstance(mask_key, six.text_type):
+            mask_key = six.b(mask_key)
+
+        if isinstance(data, six.text_type):
+            data = six.b(data)
+
+        if numpy:
+            origlen = len(data)
+            _mask_key = mask_key[3] << 24 | mask_key[2] << 16 | mask_key[1] << 8 | mask_key[0]
+
+            # We need data to be a multiple of four...
+            data += bytes(" " * (4 - (len(data) % 4)), "us-ascii")
+            a = numpy.frombuffer(data, dtype="uint32")
+            masked = numpy.bitwise_xor(a, [_mask_key]).astype("uint32")
+            if len(data) > origlen:
+              return masked.tobytes()[:origlen]
+            return masked.tobytes()
+        else:
+            _m = array.array("B", mask_key)
+            _d = array.array("B", data)
+            return _mask(_m, _d)
+
+
+class frame_buffer(object):
+    _HEADER_MASK_INDEX = 5
+    _HEADER_LENGTH_INDEX = 6
+
+    def __init__(self, recv_fn, skip_utf8_validation):
+        self.recv = recv_fn
+        self.skip_utf8_validation = skip_utf8_validation
+        # Buffers over the packets from the layer beneath until desired amount
+        # bytes of bytes are received.
+        self.recv_buffer = []
+        self.clear()
+        self.lock = Lock()
+
+    def clear(self):
+        self.header = None
+        self.length = None
+        self.mask = None
+
+    def has_received_header(self):
+        return self.header is None
+
+    def recv_header(self):
+        header = self.recv_strict(2)
+        b1 = header[0]
+
+        if six.PY2:
+            b1 = ord(b1)
+
+        fin = b1 >> 7 & 1
+        rsv1 = b1 >> 6 & 1
+        rsv2 = b1 >> 5 & 1
+        rsv3 = b1 >> 4 & 1
+        opcode = b1 & 0xf
+        b2 = header[1]
+
+        if six.PY2:
+            b2 = ord(b2)
+
+        has_mask = b2 >> 7 & 1
+        length_bits = b2 & 0x7f
+
+        self.header = (fin, rsv1, rsv2, rsv3, opcode, has_mask, length_bits)
+
+    def has_mask(self):
+        if not self.header:
+            return False
+        return self.header[frame_buffer._HEADER_MASK_INDEX]
+
+    def has_received_length(self):
+        return self.length is None
+
+    def recv_length(self):
+        bits = self.header[frame_buffer._HEADER_LENGTH_INDEX]
+        length_bits = bits & 0x7f
+        if length_bits == 0x7e:
+            v = self.recv_strict(2)
+            self.length = struct.unpack("!H", v)[0]
+        elif length_bits == 0x7f:
+            v = self.recv_strict(8)
+            self.length = struct.unpack("!Q", v)[0]
+        else:
+            self.length = length_bits
+
+    def has_received_mask(self):
+        return self.mask is None
+
+    def recv_mask(self):
+        self.mask = self.recv_strict(4) if self.has_mask() else ""
+
+    def recv_frame(self):
+
+        with self.lock:
+            # Header
+            if self.has_received_header():
+                self.recv_header()
+            (fin, rsv1, rsv2, rsv3, opcode, has_mask, _) = self.header
+
+            # Frame length
+            if self.has_received_length():
+                self.recv_length()
+            length = self.length
+
+            # Mask
+            if self.has_received_mask():
+                self.recv_mask()
+            mask = self.mask
+
+            # Payload
+            payload = self.recv_strict(length)
+            if has_mask:
+                payload = ABNF.mask(mask, payload)
+
+            # Reset for next frame
+            self.clear()
+
+            frame = ABNF(fin, rsv1, rsv2, rsv3, opcode, has_mask, payload)
+            frame.validate(self.skip_utf8_validation)
+
+        return frame
+
+    def recv_strict(self, bufsize):
+        shortage = bufsize - sum(len(x) for x in self.recv_buffer)
+        while shortage > 0:
+            # Limit buffer size that we pass to socket.recv() to avoid
+            # fragmenting the heap -- the number of bytes recv() actually
+            # reads is limited by socket buffer and is relatively small,
+            # yet passing large numbers repeatedly causes lots of large
+            # buffers allocated and then shrunk, which results in
+            # fragmentation.
+            bytes_ = self.recv(min(16384, shortage))
+            self.recv_buffer.append(bytes_)
+            shortage -= len(bytes_)
+
+        unified = six.b("").join(self.recv_buffer)
+
+        if shortage == 0:
+            self.recv_buffer = []
+            return unified
+        else:
+            self.recv_buffer = [unified[bufsize:]]
+            return unified[:bufsize]
+
+
+class continuous_frame(object):
+
+    def __init__(self, fire_cont_frame, skip_utf8_validation):
+        self.fire_cont_frame = fire_cont_frame
+        self.skip_utf8_validation = skip_utf8_validation
+        self.cont_data = None
+        self.recving_frames = None
+
+    def validate(self, frame):
+        if not self.recving_frames and frame.opcode == ABNF.OPCODE_CONT:
+            raise WebSocketProtocolException("Illegal frame")
+        if self.recving_frames and \
+                frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY):
+            raise WebSocketProtocolException("Illegal frame")
+
+    def add(self, frame):
+        if self.cont_data:
+            self.cont_data[1] += frame.data
+        else:
+            if frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY):
+                self.recving_frames = frame.opcode
+            self.cont_data = [frame.opcode, frame.data]
+
+        if frame.fin:
+            self.recving_frames = None
+
+    def is_fire(self, frame):
+        return frame.fin or self.fire_cont_frame
+
+    def extract(self, frame):
+        data = self.cont_data
+        self.cont_data = None
+        frame.data = data[1]
+        if not self.fire_cont_frame and data[0] == ABNF.OPCODE_TEXT and not self.skip_utf8_validation and not validate_utf8(frame.data):
+            raise WebSocketPayloadException(
+                "cannot decode: " + repr(frame.data))
+
+        return [data[0], frame]

+ 325 - 0
mt/websocket/_app.py

@@ -0,0 +1,325 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA  02110-1335  USA
+
+"""
+
+"""
+WebSocketApp provides higher level APIs.
+"""
+import select
+import sys
+import threading
+import time
+import traceback
+
+import six
+
+from ._abnf import ABNF
+from ._core import WebSocket, getdefaulttimeout
+from ._exceptions import *
+from . import _logging
+
+
+__all__ = ["WebSocketApp"]
+
+class Dispatcher:
+    def __init__(self, app, ping_timeout):
+        self.app  = app
+        self.ping_timeout = ping_timeout
+
+    def read(self, sock, read_callback, check_callback):
+        while self.app.sock.connected:
+            r, w, e = select.select(
+            (self.app.sock.sock, ), (), (), self.ping_timeout) # Use a 10 second timeout to avoid to wait forever on close
+            if r:
+                if not read_callback():
+                    break
+            check_callback()
+
+class SSLDispacther:
+    def __init__(self, app, ping_timeout):
+        self.app  = app
+        self.ping_timeout = ping_timeout
+
+    def read(self, sock, read_callback, check_callback):
+        while self.app.sock.connected:
+            r = self.select()
+            if r:
+                if not read_callback():
+                    break
+            check_callback()
+
+    def select(self):
+        sock = self.app.sock.sock
+        if sock.pending():
+            return [sock,]
+
+        r, w, e = select.select((sock, ), (), (), self.ping_timeout)
+        return r
+
+class WebSocketApp(object):
+    """
+    Higher level of APIs are provided.
+    The interface is like JavaScript WebSocket object.
+    """
+
+    def __init__(self, url, header=None,
+                 on_open=None, on_message=None, on_error=None,
+                 on_close=None, on_ping=None, on_pong=None,
+                 on_cont_message=None,
+                 keep_running=True, get_mask_key=None, cookie=None,
+                 subprotocols=None,
+                 on_data=None):
+        """
+        url: websocket url.
+        header: custom header for websocket handshake.
+        on_open: callable object which is called at opening websocket.
+          this function has one argument. The argument is this class object.
+        on_message: callable object which is called when received data.
+         on_message has 2 arguments.
+         The 1st argument is this class object.
+         The 2nd argument is utf-8 string which we get from the server.
+        on_error: callable object which is called when we get error.
+         on_error has 2 arguments.
+         The 1st argument is this class object.
+         The 2nd argument is exception object.
+        on_close: callable object which is called when closed the connection.
+         this function has one argument. The argument is this class object.
+        on_cont_message: callback object which is called when receive continued
+         frame data.
+         on_cont_message has 3 arguments.
+         The 1st argument is this class object.
+         The 2nd argument is utf-8 string which we get from the server.
+         The 3rd argument is continue flag. if 0, the data continue
+         to next frame data
+        on_data: callback object which is called when a message received.
+          This is called before on_message or on_cont_message,
+          and then on_message or on_cont_message is called.
+          on_data has 4 argument.
+          The 1st argument is this class object.
+          The 2nd argument is utf-8 string which we get from the server.
+          The 3rd argument is data type. ABNF.OPCODE_TEXT or ABNF.OPCODE_BINARY will be came.
+          The 4th argument is continue flag. if 0, the data continue
+        keep_running: this parameter is obosleted and ignored it.
+        get_mask_key: a callable to produce new mask keys,
+          see the WebSocket.set_mask_key's docstring for more information
+        subprotocols: array of available sub protocols. default is None.
+        """
+        self.url = url
+        self.header = header if header is not None else []
+        self.cookie = cookie
+        self.on_open = on_open
+        self.on_message = on_message
+        self.on_data = on_data
+        self.on_error = on_error
+        self.on_close = on_close
+        self.on_ping = on_ping
+        self.on_pong = on_pong
+        self.on_cont_message = on_cont_message
+        self.keep_running = False
+        self.get_mask_key = get_mask_key
+        self.sock = None
+        self.last_ping_tm = 0
+        self.last_pong_tm = 0
+        self.subprotocols = subprotocols
+
+    def send(self, data, opcode=ABNF.OPCODE_TEXT):
+        """
+        send message.
+        data: message to send. If you set opcode to OPCODE_TEXT,
+              data must be utf-8 string or unicode.
+        opcode: operation code of data. default is OPCODE_TEXT.
+        """
+
+        if not self.sock or self.sock.send(data, opcode) == 0:
+            raise WebSocketConnectionClosedException(
+                "Connection is already closed.")
+
+    def close(self, **kwargs):
+        """
+        close websocket connection.
+        """
+        self.keep_running = False
+        if self.sock:
+            self.sock.close(**kwargs)
+
+    def _send_ping(self, interval, event):
+        while not event.wait(interval):
+            self.last_ping_tm = time.time()
+            if self.sock:
+                try:
+                    self.sock.ping()
+                except Exception as ex:
+                    _logging.warning("send_ping routine terminated: {}".format(ex))
+                    break
+
+    def run_forever(self, sockopt=None, sslopt=None,
+                    ping_interval=0, ping_timeout=None,
+                    http_proxy_host=None, http_proxy_port=None,
+                    http_no_proxy=None, http_proxy_auth=None,
+                    skip_utf8_validation=False,
+                    host=None, origin=None, dispatcher=None):
+        """
+        run event loop for WebSocket framework.
+        This loop is infinite loop and is alive during websocket is available.
+        sockopt: values for socket.setsockopt.
+            sockopt must be tuple
+            and each element is argument of sock.setsockopt.
+        sslopt: ssl socket optional dict.
+        ping_interval: automatically send "ping" command
+            every specified period(second)
+            if set to 0, not send automatically.
+        ping_timeout: timeout(second) if the pong message is not received.
+        http_proxy_host: http proxy host name.
+        http_proxy_port: http proxy port. If not set, set to 80.
+        http_no_proxy: host names, which doesn't use proxy.
+        skip_utf8_validation: skip utf8 validation.
+        host: update host header.
+        origin: update origin header.
+        """
+
+        if not ping_timeout or ping_timeout <= 0:
+            ping_timeout = None
+        if ping_timeout and ping_interval and ping_interval <= ping_timeout:
+            raise WebSocketException("Ensure ping_interval > ping_timeout")
+        if sockopt is None:
+            sockopt = []
+        if sslopt is None:
+            sslopt = {}
+        if self.sock:
+            raise WebSocketException("socket is already opened")
+        thread = None
+        close_frame = None
+        self.keep_running = True
+        self.last_ping_tm = 0
+        self.last_pong_tm = 0
+
+        def teardown():
+            if thread and thread.isAlive():
+                event.set()
+                thread.join()
+            self.keep_running = False
+            self.sock.close()
+            close_args = self._get_close_args(
+                close_frame.data if close_frame else None)
+            self._callback(self.on_close, *close_args)
+            self.sock = None
+
+        try:
+            self.sock = WebSocket(
+                self.get_mask_key, sockopt=sockopt, sslopt=sslopt,
+                fire_cont_frame=self.on_cont_message and True or False,
+                skip_utf8_validation=skip_utf8_validation)
+            self.sock.settimeout(getdefaulttimeout())
+            self.sock.connect(
+                self.url, header=self.header, cookie=self.cookie,
+                http_proxy_host=http_proxy_host,
+                http_proxy_port=http_proxy_port, http_no_proxy=http_no_proxy,
+                http_proxy_auth=http_proxy_auth, subprotocols=self.subprotocols,
+                host=host, origin=origin)
+            if not dispatcher:
+                dispatcher = self.create_dispatcher(ping_timeout)
+
+            self._callback(self.on_open)
+
+            if ping_interval:
+                event = threading.Event()
+                thread = threading.Thread(
+                    target=self._send_ping, args=(ping_interval, event))
+                thread.setDaemon(True)
+                thread.start()
+
+            def read():
+                if not self.keep_running:
+                    return teardown()
+
+                op_code, frame = self.sock.recv_data_frame(True)
+                if op_code == ABNF.OPCODE_CLOSE:
+                    close_frame = frame
+                    return teardown()
+                elif op_code == ABNF.OPCODE_PING:
+                    self._callback(self.on_ping, frame.data)
+                elif op_code == ABNF.OPCODE_PONG:
+                    self.last_pong_tm = time.time()
+                    self._callback(self.on_pong, frame.data)
+                elif op_code == ABNF.OPCODE_CONT and self.on_cont_message:
+                    self._callback(self.on_data, frame.data,
+                                   frame.opcode, frame.fin)
+                    self._callback(self.on_cont_message,
+                                   frame.data, frame.fin)
+                else:
+                    data = frame.data
+                    if six.PY3 and op_code == ABNF.OPCODE_TEXT:
+                        data = data.decode("utf-8")
+                    self._callback(self.on_data, data, frame.opcode, True)
+                    self._callback(self.on_message, data)
+
+                return True
+
+            def check():
+                if ping_timeout and self.last_ping_tm \
+                        and time.time() - self.last_ping_tm > ping_timeout \
+                        and self.last_ping_tm - self.last_pong_tm > ping_timeout:
+                    raise WebSocketTimeoutException("ping/pong timed out")
+                return True
+
+            dispatcher.read(self.sock.sock, read, check)
+        except (Exception, KeyboardInterrupt, SystemExit) as e:
+            self._callback(self.on_error, e)
+            if isinstance(e, SystemExit):
+                # propagate SystemExit further
+                raise
+            teardown()
+
+    def create_dispatcher(self, ping_timeout):
+        timeout = ping_timeout or 10
+        if self.sock.is_ssl():
+            return SSLDispacther(self, timeout)
+
+        return Dispatcher(self, timeout)
+
+    def _get_close_args(self, data):
+        """ this functions extracts the code, reason from the close body
+        if they exists, and if the self.on_close except three arguments """
+        import inspect
+        # if the on_close callback is "old", just return empty list
+        if sys.version_info < (3, 0):
+            if not self.on_close or len(inspect.getargspec(self.on_close).args) != 3:
+                return []
+        else:
+            if not self.on_close or len(inspect.getfullargspec(self.on_close).args) != 3:
+                return []
+
+        if data and len(data) >= 2:
+            code = 256 * six.byte2int(data[0:1]) + six.byte2int(data[1:2])
+            reason = data[2:].decode('utf-8')
+            return [code, reason]
+
+        return [None, None]
+
+    def _callback(self, callback, *args):
+        if callback:
+            try:
+                callback(self, *args)
+            except Exception as e:
+                _logging.error("error from callback {}: {}".format(callback, e))
+                if _logging.isEnabledForDebug():
+                    _, _, tb = sys.exc_info()
+                    traceback.print_tb(tb)

+ 52 - 0
mt/websocket/_cookiejar.py

@@ -0,0 +1,52 @@
+try:
+    import Cookie
+except:
+    import http.cookies as Cookie
+
+
+class SimpleCookieJar(object):
+    def __init__(self):
+        self.jar = dict()
+
+    def add(self, set_cookie):
+        if set_cookie:
+            try:
+                simpleCookie = Cookie.SimpleCookie(set_cookie)
+            except:
+                simpleCookie = Cookie.SimpleCookie(set_cookie.encode('ascii', 'ignore'))
+
+            for k, v in simpleCookie.items():
+                domain = v.get("domain")
+                if domain:
+                    if not domain.startswith("."):
+                        domain = "." + domain
+                    cookie = self.jar.get(domain) if self.jar.get(domain) else Cookie.SimpleCookie()
+                    cookie.update(simpleCookie)
+                    self.jar[domain.lower()] = cookie
+
+    def set(self, set_cookie):
+        if set_cookie:
+            try:
+                simpleCookie = Cookie.SimpleCookie(set_cookie)
+            except:
+                simpleCookie = Cookie.SimpleCookie(set_cookie.encode('ascii', 'ignore'))
+
+            for k, v in simpleCookie.items():
+                domain = v.get("domain")
+                if domain:
+                    if not domain.startswith("."):
+                        domain = "." + domain
+                    self.jar[domain.lower()] = simpleCookie
+
+    def get(self, host):
+        if not host:
+            return ""
+
+        cookies = []
+        for domain, simpleCookie in self.jar.items():
+            host = host.lower()
+            if host.endswith(domain) or host == domain[1:]:
+                cookies.append(self.jar.get(domain))
+
+        return "; ".join(filter(None, ["%s=%s" % (k, v.value) for cookie in filter(None, sorted(cookies)) for k, v in
+                                       sorted(cookie.items())]))

+ 495 - 0
mt/websocket/_core.py

@@ -0,0 +1,495 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA  02110-1335  USA
+
+"""
+from __future__ import print_function
+
+import socket
+import struct
+import threading
+
+import six
+
+# websocket modules
+from ._abnf import *
+from ._exceptions import *
+from ._handshake import *
+from ._http import *
+from ._logging import *
+from ._socket import *
+from ._ssl_compat import *
+from ._utils import *
+
+__all__ = ['WebSocket', 'create_connection']
+
+"""
+websocket python client.
+=========================
+
+This version support only hybi-13.
+Please see http://tools.ietf.org/html/rfc6455 for protocol.
+"""
+
+
+class WebSocket(object):
+    """
+    Low level WebSocket interface.
+    This class is based on
+      The WebSocket protocol draft-hixie-thewebsocketprotocol-76
+      http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
+
+    We can connect to the websocket server and send/receive data.
+    The following example is an echo client.
+
+    >>> import websocket
+    >>> ws = websocket.WebSocket()
+    >>> ws.connect("ws://echo.websocket.org")
+    >>> ws.send("Hello, Server")
+    >>> ws.recv()
+    'Hello, Server'
+    >>> ws.close()
+
+    get_mask_key: a callable to produce new mask keys, see the set_mask_key
+      function's docstring for more details
+    sockopt: values for socket.setsockopt.
+        sockopt must be tuple and each element is argument of sock.setsockopt.
+    sslopt: dict object for ssl socket option.
+    fire_cont_frame: fire recv event for each cont frame. default is False
+    enable_multithread: if set to True, lock send method.
+    skip_utf8_validation: skip utf8 validation.
+    """
+
+    def __init__(self, get_mask_key=None, sockopt=None, sslopt=None,
+                 fire_cont_frame=False, enable_multithread=False,
+                 skip_utf8_validation=False, **_):
+        """
+        Initialize WebSocket object.
+        """
+        self.sock_opt = sock_opt(sockopt, sslopt)
+        self.handshake_response = None
+        self.sock = None
+
+        self.connected = False
+        self.get_mask_key = get_mask_key
+        # These buffer over the build-up of a single frame.
+        self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation)
+        self.cont_frame = continuous_frame(
+            fire_cont_frame, skip_utf8_validation)
+
+        if enable_multithread:
+            self.lock = threading.Lock()
+            self.readlock = threading.Lock()
+        else:
+            self.lock = NoLock()
+            self.readlock = NoLock()
+
+    def __iter__(self):
+        """
+        Allow iteration over websocket, implying sequential `recv` executions.
+        """
+        while True:
+            yield self.recv()
+
+    def __next__(self):
+        return self.recv()
+
+    def next(self):
+        return self.__next__()
+
+    def fileno(self):
+        return self.sock.fileno()
+
+    def set_mask_key(self, func):
+        """
+        set function to create musk key. You can customize mask key generator.
+        Mainly, this is for testing purpose.
+
+        func: callable object. the func takes 1 argument as integer.
+              The argument means length of mask key.
+              This func must return string(byte array),
+              which length is argument specified.
+        """
+        self.get_mask_key = func
+
+    def gettimeout(self):
+        """
+        Get the websocket timeout(second).
+        """
+        return self.sock_opt.timeout
+
+    def settimeout(self, timeout):
+        """
+        Set the timeout to the websocket.
+
+        timeout: timeout time(second).
+        """
+        self.sock_opt.timeout = timeout
+        if self.sock:
+            self.sock.settimeout(timeout)
+
+    timeout = property(gettimeout, settimeout)
+
+    def getsubprotocol(self):
+        """
+        get subprotocol
+        """
+        if self.handshake_response:
+            return self.handshake_response.subprotocol
+        else:
+            return None
+
+    subprotocol = property(getsubprotocol)
+
+    def getstatus(self):
+        """
+        get handshake status
+        """
+        if self.handshake_response:
+            return self.handshake_response.status
+        else:
+            return None
+
+    status = property(getstatus)
+
+    def getheaders(self):
+        """
+        get handshake response header
+        """
+        if self.handshake_response:
+            return self.handshake_response.headers
+        else:
+            return None
+
+    def is_ssl(self):
+        return isinstance(self.sock, ssl.SSLSocket)
+
+    headers = property(getheaders)
+
+    def connect(self, url, **options):
+        """
+        Connect to url. url is websocket url scheme.
+        ie. ws://host:port/resource
+        You can customize using 'options'.
+        If you set "header" list object, you can set your own custom header.
+
+        >>> ws = WebSocket()
+        >>> ws.connect("ws://echo.websocket.org/",
+                ...     header=["User-Agent: MyProgram",
+                ...             "x-custom: header"])
+
+        timeout: socket timeout time. This value is integer.
+                 if you set None for this value,
+                 it means "use default_timeout value"
+
+        options: "header" -> custom http header list or dict.
+                 "cookie" -> cookie value.
+                 "origin" -> custom origin url.
+                 "host"   -> custom host header string.
+                 "http_proxy_host" - http proxy host name.
+                 "http_proxy_port" - http proxy port. If not set, set to 80.
+                 "http_no_proxy"   - host names, which doesn't use proxy.
+                 "http_proxy_auth" - http proxy auth information.
+                                     tuple of username and password.
+                                     default is None
+                 "subprotocols" - array of available sub protocols.
+                                  default is None.
+                 "socket" - pre-initialized stream socket.
+
+        """
+        self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options),
+                                   options.pop('socket', None))
+
+        try:
+            self.handshake_response = handshake(self.sock, *addrs, **options)
+            self.connected = True
+        except:
+            if self.sock:
+                self.sock.close()
+                self.sock = None
+            raise
+
+    def send(self, payload, opcode=ABNF.OPCODE_TEXT):
+        """
+        Send the data as string.
+
+        payload: Payload must be utf-8 string or unicode,
+                  if the opcode is OPCODE_TEXT.
+                  Otherwise, it must be string(byte array)
+
+        opcode: operation code to send. Please see OPCODE_XXX.
+        """
+
+        frame = ABNF.create_frame(payload, opcode)
+        return self.send_frame(frame)
+
+    def send_frame(self, frame):
+        """
+        Send the data frame.
+
+        frame: frame data created  by ABNF.create_frame
+
+        >>> ws = create_connection("ws://echo.websocket.org/")
+        >>> frame = ABNF.create_frame("Hello", ABNF.OPCODE_TEXT)
+        >>> ws.send_frame(frame)
+        >>> cont_frame = ABNF.create_frame("My name is ", ABNF.OPCODE_CONT, 0)
+        >>> ws.send_frame(frame)
+        >>> cont_frame = ABNF.create_frame("Foo Bar", ABNF.OPCODE_CONT, 1)
+        >>> ws.send_frame(frame)
+
+        """
+        if self.get_mask_key:
+            frame.get_mask_key = self.get_mask_key
+        data = frame.format()
+        length = len(data)
+        trace("send: " + repr(data))
+
+        with self.lock:
+            while data:
+                l = self._send(data)
+                data = data[l:]
+
+        return length
+
def send_binary(self, payload):
    """Send *payload* as a single binary frame."""
    return self.send(payload, ABNF.OPCODE_BINARY)

def ping(self, payload=""):
    """
    Send a ping control frame.

    payload: data to carry in the ping; text is utf-8 encoded first.
    """
    data = payload.encode("utf-8") if isinstance(payload, six.text_type) else payload
    self.send(data, ABNF.OPCODE_PING)

def pong(self, payload):
    """
    Send a pong control frame.

    payload: data to carry in the pong; text is utf-8 encoded first.
    """
    data = payload.encode("utf-8") if isinstance(payload, six.text_type) else payload
    self.send(data, ABNF.OPCODE_PONG)
+
def recv(self):
    """
    Receive string data(byte array) from the server.

    return value: string(byte array) value.
    """
    # serialize readers so a fragmented message isn't split between threads
    with self.readlock:
        opcode, data = self.recv_data()
    # on Python 3, text frames are decoded to str; binary stays as bytes
    if six.PY3 and opcode == ABNF.OPCODE_TEXT:
        return data.decode("utf-8")
    elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY:
        return data
    else:
        # control frames and anything unexpected yield an empty string
        return ''
+
def recv_data(self, control_frame=False):
    """
    Receive one message and return (opcode, payload bytes).

    control_frame: when True, ping/pong control frames are returned to
    the caller as well instead of being handled silently.
    """
    code, received = self.recv_data_frame(control_frame)
    return code, received.data
+
def recv_data_frame(self, control_frame=False):
    """
    Receive data with operation code.

    control_frame: a boolean flag indicating whether to return control frame
    data, defaults to False

    return  value: tuple of operation code and string(byte array) value.
    """
    # loop until a complete data message (or, optionally, a control frame)
    # is available; fragments are accumulated in self.cont_frame
    while True:
        frame = self.recv_frame()
        if not frame:
            # handle error:
            # 'NoneType' object has no attribute 'opcode'
            raise WebSocketProtocolException(
                "Not a valid frame %s" % frame)
        elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT):
            self.cont_frame.validate(frame)
            self.cont_frame.add(frame)

            # fires once the final fragment of the message has arrived
            if self.cont_frame.is_fire(frame):
                return self.cont_frame.extract(frame)

        elif frame.opcode == ABNF.OPCODE_CLOSE:
            # reply to the peer's close handshake before reporting it
            self.send_close()
            return frame.opcode, frame
        elif frame.opcode == ABNF.OPCODE_PING:
            # RFC 6455 caps control-frame payloads at 125 bytes
            if len(frame.data) < 126:
                self.pong(frame.data)
            else:
                raise WebSocketProtocolException(
                    "Ping message is too long")
            if control_frame:
                return frame.opcode, frame
        elif frame.opcode == ABNF.OPCODE_PONG:
            if control_frame:
                return frame.opcode, frame
+
def recv_frame(self):
    """Read and return the next ABNF frame object from the frame buffer."""
    buffered = self.frame_buffer
    return buffered.recv_frame()
+
def send_close(self, status=STATUS_NORMAL, reason=six.b("")):
    """
    Transmit a close frame without waiting for the peer's reply.

    status: close status code (see STATUS_XXX).
    reason: textual close reason, as str or bytes.
    """
    if not (0 <= status < ABNF.LENGTH_16):
        raise ValueError("code is invalid range")
    self.connected = False
    payload = struct.pack('!H', status) + reason
    self.send(payload, ABNF.OPCODE_CLOSE)
+
def close(self, status=STATUS_NORMAL, reason=six.b(""), timeout=3):
    """
    Close Websocket object

    status: status code to send. see STATUS_XXX.

    reason: the reason to close. This must be string.

    timeout: timeout until receive a close frame.
        If None, it will wait forever until receive a close frame.
    """
    if self.connected:
        if status < 0 or status >= ABNF.LENGTH_16:
            raise ValueError("code is invalid range")

        try:
            self.connected = False
            self.send(struct.pack('!H', status) +
                      reason, ABNF.OPCODE_CLOSE)
            # wait (up to `timeout`) for the peer's close reply, then
            # restore whatever timeout the socket had before
            sock_timeout = self.sock.gettimeout()
            self.sock.settimeout(timeout)
            try:
                frame = self.recv_frame()
                if isEnabledForError():
                    recv_status = struct.unpack("!H", frame.data[0:2])[0]
                    if recv_status != STATUS_NORMAL:
                        error("close status: " + repr(recv_status))
            except:
                # best-effort: ignore any error while waiting for the reply
                pass
            self.sock.settimeout(sock_timeout)
            self.sock.shutdown(socket.SHUT_RDWR)
        except:
            # the close handshake is best-effort; always fall through to
            # shutdown() so the socket is released
            pass

    self.shutdown()
+
def abort(self):
    """
    Low-level asynchronous abort: shuts the socket down so that threads
    blocked in recv_* wake up.
    """
    if not self.connected:
        return
    self.sock.shutdown(socket.SHUT_RDWR)
+
def shutdown(self):
    """Close the underlying socket immediately and mark the connection dead."""
    sock = self.sock
    if not sock:
        return
    sock.close()
    self.sock = None
    self.connected = False
+
def _send(self, data):
    # thin wrapper over the module-level send() for easier overriding/testing
    return send(self.sock, data)

def _recv(self, bufsize):
    try:
        return recv(self.sock, bufsize)
    except WebSocketConnectionClosedException:
        # the peer vanished: release the socket and mark this connection
        # dead before propagating the exception to the caller
        if self.sock:
            self.sock.close()
        self.sock = None
        self.connected = False
        raise
+
+
def create_connection(url, timeout=None, class_=WebSocket, **options):
    """
    connect to url and return websocket object.

    Connect to url and return the WebSocket object.
    Passing optional timeout parameter will set the timeout on the socket.
    If no timeout is supplied,
    the global default timeout setting returned by getdefaulttimeout() is used.
    You can customize using 'options'.
    If you set "header" list object, you can set your own custom header.

    >>> conn = create_connection("ws://echo.websocket.org/",
         ...     header=["User-Agent: MyProgram",
         ...             "x-custom: header"])


    timeout: socket timeout time. This value is integer.
             if you set None for this value,
             it means "use default_timeout value"

    class_: class to instantiate when creating the connection. It has to implement
            settimeout and connect. Its __init__ should be compatible with
            WebSocket.__init__, i.e. accept all of its kwargs.
    options: "header" -> custom http header list or dict.
             "cookie" -> cookie value.
             "origin" -> custom origin url.
             "host"   -> custom host header string.
             "http_proxy_host" - http proxy host name.
             "http_proxy_port" - http proxy port. If not set, set to 80.
             "http_no_proxy"   - host names, which doesn't use proxy.
             "http_proxy_auth" - http proxy auth information.
                                    tuple of username and password.
                                    default is None
             "enable_multithread" -> enable lock for multithread.
             "sockopt" -> socket options
             "sslopt" -> ssl option
             "subprotocols" - array of available sub protocols.
                              default is None.
             "skip_utf8_validation" - skip utf8 validation.
             "socket" - pre-initialized stream socket.
    """
    # pop the constructor-only options; the rest is forwarded to connect()
    sockopt = options.pop("sockopt", [])
    sslopt = options.pop("sslopt", {})
    fire_cont_frame = options.pop("fire_cont_frame", False)
    enable_multithread = options.pop("enable_multithread", False)
    skip_utf8_validation = options.pop("skip_utf8_validation", False)
    websock = class_(sockopt=sockopt, sslopt=sslopt,
                     fire_cont_frame=fire_cont_frame,
                     enable_multithread=enable_multithread,
                     skip_utf8_validation=skip_utf8_validation, **options)
    websock.settimeout(timeout if timeout is not None else getdefaulttimeout())
    websock.connect(url, **options)
    return websock

+ 87 - 0
mt/websocket/_exceptions.py

@@ -0,0 +1,87 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA  02110-1335  USA
+
+"""
+
+
+"""
+define websocket exceptions
+"""
+
+
class WebSocketException(Exception):
    """
    Base class for all websocket exceptions.
    """
    pass


class WebSocketProtocolException(WebSocketException):
    """
    Raised when the websocket protocol is violated (invalid frame, etc.).
    """
    pass


class WebSocketPayloadException(WebSocketException):
    """
    Raised when a websocket frame payload is invalid.
    """
    pass


class WebSocketConnectionClosedException(WebSocketException):
    """
    Raised when the remote host closed the connection or a network
    error occurred.
    """
    pass


class WebSocketTimeoutException(WebSocketException):
    """
    Raised on socket timeout during read/write of websocket data.
    """
    pass


class WebSocketProxyException(WebSocketException):
    """
    Raised when a proxy error occurred while connecting.
    """
    pass
+
+
class WebSocketBadStatusException(WebSocketException):
    """
    Raised when the opening handshake yields an unexpected HTTP status.
    The offending status code is kept in ``status_code``.
    """

    def __init__(self, message, status_code, status_message=None):
        if status_message is None:
            msg = message % status_code
        else:
            msg = message % (status_code, status_message)
        super(WebSocketBadStatusException, self).__init__(msg)
        self.status_code = status_code
+
class WebSocketAddressException(WebSocketException):
    """Raised when address information for the websocket host cannot be found."""

+ 180 - 0
mt/websocket/_handshake.py

@@ -0,0 +1,180 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA  02110-1335  USA
+
+"""
+import hashlib
+import hmac
+import os
+
+import six
+
+from ._cookiejar import SimpleCookieJar
+from ._exceptions import *
+from ._http import *
+from ._logging import *
+from ._socket import *
+
+if six.PY3:
+    from base64 import encodebytes as base64encode
+else:
+    from base64 import encodestring as base64encode
+
+__all__ = ["handshake_response", "handshake"]
+
# prefer the constant-time comparison where the stdlib provides it
# (Python >= 2.7.7 / 3.3); fall back to plain equality otherwise
if hasattr(hmac, "compare_digest"):
    compare_digest = hmac.compare_digest
else:
    def compare_digest(s1, s2):
        return s1 == s2

# websocket supported version.
VERSION = 13

# process-wide cookie store shared by all handshakes
CookieJar = SimpleCookieJar()
+
+
class handshake_response(object):
    """
    Result of a successful opening handshake: the HTTP status, the
    response headers and the negotiated subprotocol.  Any cookies the
    server set are recorded in the shared CookieJar as a side effect.
    """

    def __init__(self, status, headers, subprotocol):
        self.status = status
        self.headers = headers
        self.subprotocol = subprotocol
        CookieJar.add(headers.get("set-cookie"))


def handshake(sock, hostname, port, resource, **options):
    """
    Perform the client side of the RFC 6455 opening handshake on *sock*.

    Raises WebSocketException when the server's response fails validation.
    Returns a handshake_response.
    """
    headers, key = _get_handshake_headers(resource, hostname, port, options)

    header_str = "\r\n".join(headers)
    send(sock, header_str)
    dump("request header", header_str)

    status, resp = _get_resp_headers(sock)
    success, subproto = _validate(resp, key, options.get("subprotocols"))
    if not success:
        raise WebSocketException("Invalid WebSocket Header")

    return handshake_response(status, resp, subproto)
+
+def _pack_hostname(hostname):
+    # IPv6 address
+    if ':' in hostname:
+        return '[' + hostname + ']'
+
+    return hostname
+
def _get_handshake_headers(resource, host, port, options):
    """
    Build the HTTP request lines for the opening handshake.

    Returns (headers, key) where *headers* is the list of request lines
    (ending with two empty strings, i.e. the blank line that terminates
    the request) and *key* is the generated Sec-WebSocket-Key value.
    """
    headers = [
        "GET %s HTTP/1.1" % resource,
        "Upgrade: websocket",
        "Connection: Upgrade"
    ]
    # omit the default ports from the Host/Origin values
    if port == 80 or port == 443:
        hostport = _pack_hostname(host)
    else:
        hostport = "%s:%d" % (_pack_hostname(host), port)

    if "host" in options and options["host"] is not None:
        headers.append("Host: %s" % options["host"])
    else:
        headers.append("Host: %s" % hostport)

    if "origin" in options and options["origin"] is not None:
        headers.append("Origin: %s" % options["origin"])
    else:
        headers.append("Origin: http://%s" % hostport)

    key = _create_sec_websocket_key()
    headers.append("Sec-WebSocket-Key: %s" % key)
    headers.append("Sec-WebSocket-Version: %s" % VERSION)

    subprotocols = options.get("subprotocols")
    if subprotocols:
        headers.append("Sec-WebSocket-Protocol: %s" % ",".join(subprotocols))

    # bug fix: a "header" option explicitly set to None used to crash in
    # headers.extend(); treat it like an absent option, consistent with
    # how "host" and "origin" are handled above
    if options.get("header") is not None:
        header = options["header"]
        if isinstance(header, dict):
            header = map(": ".join, header.items())
        headers.extend(header)

    server_cookie = CookieJar.get(host)
    client_cookie = options.get("cookie", None)

    cookie = "; ".join(filter(None, [server_cookie, client_cookie]))

    if cookie:
        headers.append("Cookie: %s" % cookie)

    headers.append("")
    headers.append("")

    return headers, key
+
+
def _get_resp_headers(sock, success_status=101):
    """Read the handshake response; raise unless it has the expected status."""
    status, headers, message = read_headers(sock)
    if status == success_status:
        return status, headers
    raise WebSocketBadStatusException("Handshake status %d %s", status, message)
+
+_HEADERS_TO_CHECK = {
+    "upgrade": "websocket",
+    "connection": "upgrade",
+}
+
+
def _validate(headers, key, subprotocols):
    """
    Check the server's handshake response headers.

    Returns (True, subprotocol) when the upgrade/connection headers, the
    negotiated subprotocol and the Sec-WebSocket-Accept digest are all
    valid, otherwise (False, None).
    """
    subproto = None
    for k, v in _HEADERS_TO_CHECK.items():
        r = headers.get(k, None)
        if not r:
            return False, None
        r = r.lower()
        if v != r:
            return False, None

    if subprotocols:
        # bug fix: the header may be absent entirely; calling .lower() on
        # None used to raise AttributeError instead of failing validation
        subproto = headers.get("sec-websocket-protocol", None)
        if not subproto or subproto.lower() not in [s.lower() for s in subprotocols]:
            error("Invalid subprotocol: " + str(subprotocols))
            return False, None
        subproto = subproto.lower()

    result = headers.get("sec-websocket-accept", None)
    if not result:
        return False, None
    result = result.lower()

    if isinstance(result, six.text_type):
        result = result.encode('utf-8')

    # expected digest per RFC 6455: base64(sha1(key + magic GUID))
    value = (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode('utf-8')
    hashed = base64encode(hashlib.sha1(value).digest()).strip().lower()
    success = compare_digest(hashed, result)

    if success:
        return True, subproto
    else:
        return False, None
+
+
def _create_sec_websocket_key():
    """Generate a random, base64-encoded 16-byte Sec-WebSocket-Key value."""
    return base64encode(os.urandom(16)).decode('utf-8').strip()

+ 319 - 0
mt/websocket/_http.py

@@ -0,0 +1,319 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA  02110-1335  USA
+
+"""
+import errno
+import os
+import socket
+import sys
+
+import six
+
+from ._exceptions import *
+from ._logging import *
+from ._socket import*
+from ._ssl_compat import *
+from ._url import *
+
+if six.PY3:
+    from base64 import encodebytes as base64encode
+else:
+    from base64 import encodestring as base64encode
+
+__all__ = ["proxy_info", "connect", "read_headers"]
+
# PySocks is optional; without it only direct and http-proxy connections
# are possible and _open_proxied_socket() will refuse to run
try:
    import socks
    ProxyConnectionError = socks.ProxyConnectionError
    HAS_PYSOCKS = True
except:
    class ProxyConnectionError(BaseException):
        pass
    HAS_PYSOCKS = False
+
class proxy_info(object):
    """
    Proxy configuration parsed from connect() keyword options.

    Recognized options: proxy_type ('http', 'socks4', 'socks5' or
    'socks5h'), http_proxy_host, http_proxy_port, http_proxy_auth and
    http_no_proxy.  Without a host, port/auth/no_proxy fall back to
    their defaults.
    """

    def __init__(self, **options):
        self.type = options.get("proxy_type", "http")
        if self.type not in ('http', 'socks4', 'socks5', 'socks5h'):
            raise ValueError(
                "proxy_type must be 'http', 'socks4', 'socks5' or 'socks5h'")
        self.host = options.get("http_proxy_host", None)
        if not self.host:
            self.port = 0
            self.auth = None
            self.no_proxy = None
        else:
            self.port = options.get("http_proxy_port", 0)
            self.auth = options.get("http_proxy_auth", None)
            self.no_proxy = options.get("http_no_proxy", None)
+
def _open_proxied_socket(url, options, proxy):
    """
    Open a connection to *url* through a SOCKS/HTTP proxy using PySocks.

    Returns (socket, (hostname, port, resource)); the socket is wrapped
    in SSL for secure URLs.
    """
    hostname, port, resource, is_secure = parse_url(url)

    if not HAS_PYSOCKS:
        raise WebSocketException("PySocks module not found.")

    # map the textual proxy type onto the PySocks constants; a trailing
    # 'h' (socks5h) requests DNS resolution on the proxy side
    ptype = socks.SOCKS5
    rdns = False
    if proxy.type == "socks4":
        ptype = socks.SOCKS4
    if proxy.type == "http":
        ptype = socks.HTTP
    if proxy.type[-1] == "h":
        rdns = True

    sock = socks.create_connection(
            (hostname, port),
            proxy_type = ptype,
            proxy_addr = proxy.host,
            proxy_port = proxy.port,
            proxy_rdns = rdns,
            proxy_username = proxy.auth[0] if proxy.auth else None,
            proxy_password = proxy.auth[1] if proxy.auth else None,
            timeout = options.timeout,
            socket_options = DEFAULT_SOCKET_OPTION + options.sockopt
    )

    if is_secure:
        if HAVE_SSL:
            sock = _ssl_socket(sock, options.sslopt, hostname)
        else:
            raise WebSocketException("SSL not available.")

    return sock, (hostname, port, resource)


def connect(url, options, proxy, socket):
    """
    Establish the transport for *url* and return
    (socket, (hostname, port, resource)).

    A pre-initialized *socket* is used as-is; a SOCKS proxy delegates to
    _open_proxied_socket(); otherwise each resolved address is tried in
    turn (optionally tunnelled through an http proxy and wrapped in SSL).
    """
    if proxy.host and not socket and not(proxy.type == 'http'):
        return _open_proxied_socket(url, options, proxy)

    hostname, port, resource, is_secure = parse_url(url)

    if socket:
        return socket, (hostname, port, resource)

    addrinfo_list, need_tunnel, auth = _get_addrinfo_list(
        hostname, port, is_secure, proxy)
    if not addrinfo_list:
        raise WebSocketException(
            "Host not found.: " + hostname + ":" + str(port))

    sock = None
    try:
        sock = _open_socket(addrinfo_list, options.sockopt, options.timeout)
        if need_tunnel:
            sock = _tunnel(sock, hostname, port, auth)

        if is_secure:
            if HAVE_SSL:
                sock = _ssl_socket(sock, options.sslopt, hostname)
            else:
                raise WebSocketException("SSL not available.")

        return sock, (hostname, port, resource)
    except:
        # never leak a half-initialized socket to the caller
        if sock:
            sock.close()
        raise


def _get_addrinfo_list(hostname, port, is_secure, proxy):
    """
    Resolve the target (or its http proxy) and return
    (addrinfo_list, need_tunnel, auth).
    """
    phost, pport, pauth = get_proxy_info(
        hostname, is_secure, proxy.host, proxy.port, proxy.auth, proxy.no_proxy)
    try:
        if not phost:
            addrinfo_list = socket.getaddrinfo(
                hostname, port, 0, 0, socket.SOL_TCP)
            return addrinfo_list, False, None
        else:
            pport = pport and pport or 80
            # when running on windows 10, the getaddrinfo used above
            # returns a socktype 0. This generates an error exception:
            #_on_error: exception Socket type must be stream or datagram, not 0
            # Force the socket type to SOCK_STREAM
            addrinfo_list = socket.getaddrinfo(phost, pport, 0, socket.SOCK_STREAM, socket.SOL_TCP)
            return addrinfo_list, True, pauth
    except socket.gaierror as e:
        raise WebSocketAddressException(e)
+
+
def _open_socket(addrinfo_list, sockopt, timeout):
    """
    Try each addrinfo entry in turn and return the first connected socket.

    Connection-refused and proxy errors move on to the next candidate
    address; any other socket error is raised immediately.  Raises the
    last recorded error when every candidate fails.
    """
    # refused-connection errno values; WSAECONNREFUSED exists only on Windows,
    # so hoist this probe out of the loop instead of re-running it per failure
    try:
        e_conn_refused = (errno.ECONNREFUSED, errno.WSAECONNREFUSED)
    except AttributeError:
        e_conn_refused = (errno.ECONNREFUSED,)

    err = None
    for addrinfo in addrinfo_list:
        family, socktype, proto = addrinfo[:3]
        sock = socket.socket(family, socktype, proto)
        sock.settimeout(timeout)
        for opts in DEFAULT_SOCKET_OPTION:
            sock.setsockopt(*opts)
        for opts in sockopt:
            sock.setsockopt(*opts)

        address = addrinfo[4]
        try:
            sock.connect(address)
            err = None
        # renamed the exception variable: binding it as `error` shadowed the
        # error() logging helper imported at module level
        except ProxyConnectionError as exc:
            err = WebSocketProxyException(str(exc))
            err.remote_ip = str(address[0])
            sock.close()  # bug fix: failed candidate sockets used to leak
            continue
        except socket.error as exc:
            exc.remote_ip = str(address[0])
            if exc.errno in e_conn_refused:
                err = exc
                sock.close()  # bug fix: failed candidate sockets used to leak
                continue
            sock.close()
            raise exc
        else:
            break
    else:
        raise err

    return sock
+
+
def _can_use_sni():
    # SNI support requires ssl.SSLContext.wrap_socket(server_hostname=...),
    # available since Python 2.7.9 / 3.2
    return six.PY2 and sys.version_info >= (2, 7, 9) or sys.version_info >= (3, 2)


def _wrap_sni_socket(sock, sslopt, hostname, check_hostname):
    """
    Wrap *sock* via an SSLContext built from *sslopt*, sending *hostname*
    as the SNI server name.
    """
    context = ssl.SSLContext(sslopt.get('ssl_version', ssl.PROTOCOL_SSLv23))

    if sslopt.get('cert_reqs', ssl.CERT_NONE) != ssl.CERT_NONE:
        cafile = sslopt.get('ca_certs', None)
        capath = sslopt.get('ca_cert_path', None)
        if cafile or capath:
            context.load_verify_locations(cafile=cafile, capath=capath)
        elif hasattr(context, 'load_default_certs'):
            context.load_default_certs(ssl.Purpose.SERVER_AUTH)
    if sslopt.get('certfile', None):
        context.load_cert_chain(
            sslopt['certfile'],
            sslopt.get('keyfile', None),
            sslopt.get('password', None),
        )
    # see
    # https://github.com/liris/websocket-client/commit/b96a2e8fa765753e82eea531adb19716b52ca3ca#commitcomment-10803153
    context.verify_mode = sslopt['cert_reqs']
    if HAVE_CONTEXT_CHECK_HOSTNAME:
        context.check_hostname = check_hostname
    if 'ciphers' in sslopt:
        context.set_ciphers(sslopt['ciphers'])
    if 'cert_chain' in sslopt:
        certfile, keyfile, password = sslopt['cert_chain']
        context.load_cert_chain(certfile, keyfile, password)
    if 'ecdh_curve' in sslopt:
        context.set_ecdh_curve(sslopt['ecdh_curve'])

    return context.wrap_socket(
        sock,
        do_handshake_on_connect=sslopt.get('do_handshake_on_connect', True),
        suppress_ragged_eofs=sslopt.get('suppress_ragged_eofs', True),
        server_hostname=hostname,
    )


def _ssl_socket(sock, user_sslopt, hostname):
    """
    Wrap *sock* in SSL according to *user_sslopt*, honoring the
    WEBSOCKET_CLIENT_CA_BUNDLE environment variable as a CA override.
    """
    sslopt = dict(cert_reqs=ssl.CERT_REQUIRED)
    sslopt.update(user_sslopt)

    # the env var may point either at a bundle file or a CA directory
    certPath = os.environ.get('WEBSOCKET_CLIENT_CA_BUNDLE')
    if certPath and os.path.isfile(certPath) \
            and user_sslopt.get('ca_certs', None) is None \
            and user_sslopt.get('ca_cert', None) is None:
        sslopt['ca_certs'] = certPath
    elif certPath and os.path.isdir(certPath) \
            and user_sslopt.get('ca_cert_path', None) is None:
        sslopt['ca_cert_path'] = certPath

    check_hostname = sslopt["cert_reqs"] != ssl.CERT_NONE and sslopt.pop(
        'check_hostname', True)

    if _can_use_sni():
        sock = _wrap_sni_socket(sock, sslopt, hostname, check_hostname)
    else:
        sslopt.pop('check_hostname', True)
        sock = ssl.wrap_socket(sock, **sslopt)

    # older ssl modules cannot verify the hostname during the handshake,
    # so verify the peer certificate manually afterwards
    if not HAVE_CONTEXT_CHECK_HOSTNAME and check_hostname:
        match_hostname(sock.getpeercert(), hostname)

    return sock


def _tunnel(sock, host, port, auth):
    """
    Issue an HTTP CONNECT through an http proxy and return the socket
    once the tunnel is established (raises WebSocketProxyException on
    any non-200 response).
    """
    debug("Connecting proxy...")
    connect_header = "CONNECT %s:%d HTTP/1.0\r\n" % (host, port)
    # TODO: support digest auth.
    if auth and auth[0]:
        auth_str = auth[0]
        if auth[1]:
            auth_str += ":" + auth[1]
        encoded_str = base64encode(auth_str.encode()).strip().decode()
        connect_header += "Proxy-Authorization: Basic %s\r\n" % encoded_str
    connect_header += "\r\n"
    dump("request header", connect_header)

    send(sock, connect_header)

    try:
        status, resp_headers, status_message = read_headers(sock)
    except Exception as e:
        raise WebSocketProxyException(str(e))

    if status != 200:
        raise WebSocketProxyException(
            "failed CONNECT via proxy status: %r" % status)

    return sock


def read_headers(sock):
    """
    Read an HTTP response head from *sock*.

    Returns (status, headers, status_message) where *headers* maps
    lower-cased field names to values.
    """
    status = None
    status_message = None
    headers = {}
    trace("--- response header ---")

    while True:
        line = recv_line(sock)
        line = line.decode('utf-8').strip()
        if not line:
            # blank line terminates the header section
            break
        trace(line)
        if not status:
            # first line is the status line: "HTTP/1.x CODE [message]"
            status_info = line.split(" ", 2)
            status = int(status_info[1])
            if len(status_info) > 2:
                status_message = status_info[2]
        else:
            kv = line.split(":", 1)
            if len(kv) == 2:
                key, value = kv
                headers[key.lower()] = value.strip()
            else:
                raise WebSocketException("Invalid header")

    trace("-----------------------")

    return status, headers, status_message

+ 75 - 0
mt/websocket/_logging.py

@@ -0,0 +1,75 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA  02110-1335  USA
+
+"""
+import logging
+
+_logger = logging.getLogger('websocket')
+_logger.addHandler(logging.NullHandler())
+_traceEnabled = False
+
+__all__ = ["enableTrace", "dump", "error", "warning", "debug", "trace",
+           "isEnabledForError", "isEnabledForDebug"]
+
+
def enableTrace(traceable):
    """
    Turn trace logging on or off.

    traceable: True enables debug-level trace output, False disables it.
    """
    global _traceEnabled
    _traceEnabled = traceable
    if not traceable:
        return
    if not _logger.handlers:
        _logger.addHandler(logging.StreamHandler())
    _logger.setLevel(logging.DEBUG)
+
+
def dump(title, message):
    """Log *message* framed by *title* banners, only while tracing is on."""
    if not _traceEnabled:
        return
    _logger.debug("--- " + title + " ---")
    _logger.debug(message)
    _logger.debug("-----------------------")
+
+
def error(msg):
    """Log *msg* at ERROR level on the shared 'websocket' logger."""
    _logger.error(msg)


def warning(msg):
    """Log *msg* at WARNING level on the shared 'websocket' logger."""
    _logger.warning(msg)


def debug(msg):
    """Log *msg* at DEBUG level on the shared 'websocket' logger."""
    _logger.debug(msg)


def trace(msg):
    """Log *msg* at DEBUG level, but only when tracing has been enabled."""
    if _traceEnabled:
        _logger.debug(msg)


def isEnabledForError():
    """Return True when the logger would emit ERROR records."""
    return _logger.isEnabledFor(logging.ERROR)


def isEnabledForDebug():
    """Return True when the logger would emit DEBUG records."""
    return _logger.isEnabledFor(logging.DEBUG)

+ 126 - 0
mt/websocket/_socket.py

@@ -0,0 +1,126 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA 02110-1335  USA
+
+"""
+import socket
+
+import six
+import sys
+
+from ._exceptions import *
+from ._ssl_compat import *
+from ._utils import *
+
# always disable Nagle; add TCP keepalive probes where the platform
# exposes the corresponding socket constants
DEFAULT_SOCKET_OPTION = [(socket.SOL_TCP, socket.TCP_NODELAY, 1)]
if hasattr(socket, "SO_KEEPALIVE"):
    DEFAULT_SOCKET_OPTION.append((socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1))
if hasattr(socket, "TCP_KEEPIDLE"):
    DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPIDLE, 30))
if hasattr(socket, "TCP_KEEPINTVL"):
    DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPINTVL, 10))
if hasattr(socket, "TCP_KEEPCNT"):
    DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPCNT, 3))
+
+__all__ = ["DEFAULT_SOCKET_OPTION", "sock_opt", "setdefaulttimeout", "getdefaulttimeout",
+           "recv", "recv_line", "send"]
+
+
class sock_opt(object):
    """Bundle of per-connection socket options, ssl options and timeout."""

    def __init__(self, sockopt, sslopt):
        self.sockopt = [] if sockopt is None else sockopt
        self.sslopt = {} if sslopt is None else sslopt
        self.timeout = None
+
+
def setdefaulttimeout(timeout):
    """
    Set the global timeout setting to connect.

    timeout: default socket timeout time. This value is second.
    """
    # stored in the module-level _default_timeout, read back by
    # getdefaulttimeout() when create_connection() gets no explicit timeout
    global _default_timeout
    _default_timeout = timeout


def getdefaulttimeout():
    """
    Return the global timeout setting(second) to connect.
    """
    return _default_timeout
+
+
def recv(sock, bufsize):
    """
    Receive up to *bufsize* bytes from *sock*, translating socket/SSL
    timeouts into WebSocketTimeoutException and a closed connection into
    WebSocketConnectionClosedException.
    """
    if not sock:
        raise WebSocketConnectionClosedException("socket is already closed.")

    try:
        bytes_ = sock.recv(bufsize)
    except socket.timeout as e:
        message = extract_err_message(e)
        raise WebSocketTimeoutException(message)
    except SSLError as e:
        # an SSL timeout surfaces as SSLError('... timed out ...')
        message = extract_err_message(e)
        if isinstance(message, str) and 'timed out' in message:
            raise WebSocketTimeoutException(message)
        else:
            raise

    # an empty read means the peer performed an orderly shutdown
    if not bytes_:
        raise WebSocketConnectionClosedException(
            "Connection is already closed.")

    return bytes_
+
+
def recv_line(sock):
    """Read bytes one at a time until (and including) a newline."""
    newline = six.b("\n")
    chunks = []
    ch = None
    while ch != newline:
        ch = recv(sock, 1)
        chunks.append(ch)
    return six.b("").join(chunks)
+
+
def send(sock, data):
    """
    Send *data* on *sock*, encoding text to utf-8 first and translating
    timeouts into WebSocketTimeoutException.  Returns the number of
    bytes actually written (may be less than len(data)).
    """
    if isinstance(data, six.text_type):
        data = data.encode('utf-8')

    if not sock:
        raise WebSocketConnectionClosedException("socket is already closed.")

    try:
        return sock.send(data)
    except socket.timeout as e:
        message = extract_err_message(e)
        raise WebSocketTimeoutException(message)
    except Exception as e:
        # some SSL backends report timeouts via generic exceptions whose
        # message contains "timed out"
        message = extract_err_message(e)
        if isinstance(message, str) and "timed out" in message:
            raise WebSocketTimeoutException(message)
        else:
            raise

+ 44 - 0
mt/websocket/_ssl_compat.py

@@ -0,0 +1,44 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA 02110-1335  USA
+
+"""
+__all__ = ["HAVE_SSL", "ssl", "SSLError"]
+
+try:
+    import ssl
+    from ssl import SSLError
+    if hasattr(ssl, 'SSLContext') and hasattr(ssl.SSLContext, 'check_hostname'):
+        HAVE_CONTEXT_CHECK_HOSTNAME = True
+    else:
+        HAVE_CONTEXT_CHECK_HOSTNAME = False
+        if hasattr(ssl, "match_hostname"):
+            from ssl import match_hostname
+        else:
+            from backports.ssl_match_hostname import match_hostname
+        __all__.append("match_hostname")
+    __all__.append("HAVE_CONTEXT_CHECK_HOSTNAME")
+
+    HAVE_SSL = True
+except ImportError:
+    # dummy class of SSLError for environments without ssl support.
+    class SSLError(Exception):
+        pass
+
+    HAVE_SSL = False

+ 163 - 0
mt/websocket/_url.py

@@ -0,0 +1,163 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA  02110-1335  USA
+
+"""
+
+import os
+import socket
+import struct
+
+from six.moves.urllib.parse import urlparse
+
+
+__all__ = ["parse_url", "get_proxy_info"]
+
+
+def parse_url(url):
+    """
+    Parse a url and return a tuple of
+    (hostname, port, resource path, and a flag indicating secure mode).
+
+    url: url string.
+    """
+    if ":" not in url:
+        raise ValueError("url is invalid")
+
+    scheme, url = url.split(":", 1)
+
+    parsed = urlparse(url, scheme="ws")
+    if parsed.hostname:
+        hostname = parsed.hostname
+    else:
+        raise ValueError("hostname is invalid")
+    port = 0
+    if parsed.port:
+        port = parsed.port
+
+    is_secure = False
+    if scheme == "ws":
+        if not port:
+            port = 80
+    elif scheme == "wss":
+        is_secure = True
+        if not port:
+            port = 443
+    else:
+        raise ValueError("scheme %s is invalid" % scheme)
+
+    if parsed.path:
+        resource = parsed.path
+    else:
+        resource = "/"
+
+    if parsed.query:
+        resource += "?" + parsed.query
+
+    return hostname, port, resource, is_secure
+
+
+DEFAULT_NO_PROXY_HOST = ["localhost", "127.0.0.1"]
+
+
+def _is_ip_address(addr):
+    try:
+        socket.inet_aton(addr)
+    except socket.error:
+        return False
+    else:
+        return True
+
+
+def _is_subnet_address(hostname):
+    try:
+        addr, netmask = hostname.split("/")
+        return _is_ip_address(addr) and 0 <= int(netmask) < 32
+    except ValueError:
+        return False
+
+
+def _is_address_in_network(ip, net):
+    ipaddr = struct.unpack('I', socket.inet_aton(ip))[0]
+    netaddr, bits = net.split('/')
+    netmask = struct.unpack('I', socket.inet_aton(netaddr))[0] & ((2 << int(bits) - 1) - 1)
+    return ipaddr & netmask == netmask
+
+
+def _is_no_proxy_host(hostname, no_proxy):
+    if not no_proxy:
+        v = os.environ.get("no_proxy", "").replace(" ", "")
+        no_proxy = v.split(",")
+    if not no_proxy:
+        no_proxy = DEFAULT_NO_PROXY_HOST
+
+    if hostname in no_proxy:
+        return True
+    elif _is_ip_address(hostname):
+        return any([_is_address_in_network(hostname, subnet) for subnet in no_proxy if _is_subnet_address(subnet)])
+
+    return False
+
+
+def get_proxy_info(
+        hostname, is_secure, proxy_host=None, proxy_port=0, proxy_auth=None,
+        no_proxy=None, proxy_type='http'):
+    """
+    try to retrieve proxy host and port from environment
+    if not provided in options.
+    result is (proxy_host, proxy_port, proxy_auth).
+    proxy_auth is tuple of username and password
+     of proxy authentication information.
+
+    hostname: websocket server name.
+
+    is_secure:  is the connection secure? (wss)
+                looks for "https_proxy" in env
+                before falling back to "http_proxy"
+
+    options:    "http_proxy_host" - http proxy host name.
+                "http_proxy_port" - http proxy port.
+                "http_no_proxy"   - host names, which doesn't use proxy.
+                "http_proxy_auth" - http proxy auth information.
+                                    tuple of username and password.
+                                    default is None
+                "proxy_type"      - if set to "socks5" PySocks wrapper
+                                    will be used in place of a http proxy.
+                                    default is "http"
+    """
+    if _is_no_proxy_host(hostname, no_proxy):
+        return None, 0, None
+
+    if proxy_host:
+        port = proxy_port
+        auth = proxy_auth
+        return proxy_host, port, auth
+
+    env_keys = ["http_proxy"]
+    if is_secure:
+        env_keys.insert(0, "https_proxy")
+
+    for key in env_keys:
+        value = os.environ.get(key, None)
+        if value:
+            proxy = urlparse(value)
+            auth = (proxy.username, proxy.password) if proxy.username else None
+            return proxy.hostname, proxy.port, auth
+
+    return None, 0, None

+ 105 - 0
mt/websocket/_utils.py

@@ -0,0 +1,105 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor,
+    Boston, MA 02110-1335  USA
+
+"""
+import six
+
+__all__ = ["NoLock", "validate_utf8", "extract_err_message"]
+
+
+class NoLock(object):
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        pass
+
+try:
+    # If wsaccel is available we use compiled routines to validate UTF-8
+    # strings.
+    from wsaccel.utf8validator import Utf8Validator
+
+    def _validate_utf8(utfbytes):
+        return Utf8Validator().validate(utfbytes)[0]
+
+except ImportError:
+    # UTF-8 validator
+    # python implementation of http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
+
+    _UTF8_ACCEPT = 0
+    _UTF8_REJECT = 12
+
+    _UTF8D = [
+        # The first part of the table maps bytes to character classes
+        # to reduce the size of the transition table and create bitmasks.
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,  0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,  0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,  0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,  0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,  9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,
+        7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,  7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
+        8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2,  2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+        10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8,
+
+        # The second part is a transition table that maps a combination
+        # of a state of the automaton and a character class to a state.
+        0,12,24,36,60,96,84,12,12,12,48,72, 12,12,12,12,12,12,12,12,12,12,12,12,
+        12, 0,12,12,12,12,12, 0,12, 0,12,12, 12,24,12,12,12,12,12,24,12,24,12,12,
+        12,12,12,12,12,12,12,24,12,12,12,12, 12,24,12,12,12,12,12,12,12,24,12,12,
+        12,12,12,12,12,12,12,36,12,36,12,12, 12,36,12,12,12,12,12,36,12,36,12,12,
+        12,36,12,12,12,12,12,12,12,12,12,12, ]
+
+    def _decode(state, codep, ch):
+        tp = _UTF8D[ch]
+
+        codep = (ch & 0x3f) | (codep << 6) if (
+            state != _UTF8_ACCEPT) else (0xff >> tp) & ch
+        state = _UTF8D[256 + state + tp]
+
+        return state, codep
+
+    def _validate_utf8(utfbytes):
+        state = _UTF8_ACCEPT
+        codep = 0
+        for i in utfbytes:
+            if six.PY2:
+                i = ord(i)
+            state, codep = _decode(state, codep, i)
+            if state == _UTF8_REJECT:
+                return False
+
+        return True
+
+
+def validate_utf8(utfbytes):
+    """
+    validate utf8 byte string.
+    utfbytes: utf byte string to check.
+    return value: if valid utf8 string, return true. Otherwise, return false.
+    """
+    return _validate_utf8(utfbytes)
+
+
+def extract_err_message(exception):
+    if exception.args:
+        return exception.args[0]
+    else:
+        return None

+ 42 - 22
mt/ws.py

@@ -2,8 +2,17 @@
 Copyright 2011 by the AToMPM team and licensed under the LGPL
 See COPYING.lesser and README.md in the root of this project for full details'''
 
-import re, ___websocket as websocket, threading, json, httplib, logging
+import re, threading, json, logging
 
+import sys
+
+
+if sys.version_info[0] < 3:
+	import httplib as httplib
+	import websocket._app as websocket
+else:
+	import http.client as httplib
+	import websocket._app as websocket
 
 '''
 	a friendly wrapper around python-websockets that doubles as a socketio client
@@ -12,7 +21,6 @@ import re, ___websocket as websocket, threading, json, httplib, logging
 	_chlogh		a reference to an object that implements onchangelog(), this
   					method is called upon reception of changelogs from the asworker
 				  	we're subscribed to 
-	_dummy		true if this is a 'dummy' websocket... see note in main.py
 	subscribed  describes the current state of our subscription to our asworker
 						None:  don't know yet
 						True:  subscribed
@@ -32,14 +40,18 @@ class WebSocket :
 
 
 	def __init__(self,chlogh=None) :
-		assert chlogh == None or 'onchangelog' in dir(chlogh)		
+		assert chlogh == None or 'onchangelog' in dir(chlogh)
 		self._opened 	 = False
 		self._chlogh 	 = chlogh
-		self._dummy	 	 = (chlogh == None)
 		self.subscribed = None
 		self.connect()
 
-
+	def _start_ws(self, hskey):
+		self._ws = websocket.WebSocketApp(
+			'ws://127.0.0.1:8124/socket.io/1/websocket/' + hskey,
+			on_message = self._onmessage,
+			on_open = self._onopen)
+		self._ws.run_forever()
 
 	'''
 		connect to the socketio server
@@ -50,14 +62,22 @@ class WebSocket :
 	def connect(self) :
 		conn  = httplib.HTTPConnection('127.0.0.1:8124')
 		conn.request('POST','/socket.io/1/')
-		resp  = conn.getresponse() 
+		resp  = conn.getresponse()
 
 		if resp.status == 200 :
-			hskey = resp.read().split(':')[0]
-			self._ws = websocket.WebSocket(
-						'ws://127.0.0.1:8124/socket.io/1/websocket/'+hskey,
-						onopen	 = self._onopen,
-						onmessage = self._onmessage)
+			resp = resp.read()
+
+			try: #handle bytes
+				resp = resp.decode()
+			except AttributeError:
+				pass
+
+			hskey = resp.split(':')[0]
+
+			# start the websocket on a different thread as it loops forever
+			thr = threading.Thread(target = self._start_ws, args = (hskey, ))
+			thr.start()
+
 		else :
 			raise Exception('websocket initialization failed :: '+str(resp.reason))
 
@@ -65,16 +85,16 @@ class WebSocket :
 
 	'''
 		close the socket '''
-	def close(self) :
+	def close(self, ws) :
 		self._ws.close()
 
 
 
 	''' 
 		parse and handle incoming message '''
-	def _onmessage(self,msg) : 
-		if not self._dummy :
-			logging.debug('## msg recvd '+msg)
+	def _onmessage(self,ws, msg) :
+
+		logging.debug('## msg recvd '+msg)
 
 		msgType = msg[0]
 		if msgType == WebSocket.CONNECT :
@@ -96,18 +116,18 @@ class WebSocket :
 				#on POST /changeListener response
 				if msg['statusCode'] == 201 :
 					self.subscribed = True
-				else : 
-					self.subscribed = False				
-			elif self._chlogh and self.subscribed : 
+				else :
+					self.subscribed = False
+			elif self._chlogh and self.subscribed :
 				self._chlogh.onchangelog(msg['data'])
 		else :
 			pass
-	
+
 
 
 	''' 
 		mark socket connection as opened '''
-	def _onopen(self) :
+	def _onopen(self, ws) :
 		self._opened = True
 
 
@@ -118,6 +138,6 @@ class WebSocket :
 		if not self._opened :
 			t = threading.Timer(0.25,self.subscribe,[aswid])
 			t.start()
-		else : 
+		else :
 			self._ws.send(
-					'4:::{"method":"POST","url":"/changeListener?wid='+aswid+'"}')
+				'4:::{"method":"POST","url":"/changeListener?wid='+aswid+'"}')