ptcal.py

'''*****************************************************************************
AToMPM - A Tool for Multi-Paradigm Modelling
Copyright (c) 2011 Raphael Mannadiar (raphael.mannadiar@mail.mcgill.ca)

This file is part of AToMPM.

AToMPM is free software: you can redistribute it and/or modify it under the
terms of the GNU Lesser General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.

AToMPM is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along
with AToMPM. If not, see <http://www.gnu.org/licenses/>.
*****************************************************************************'''

import re, json, uuid, cPickle as pickle, threading, itertools, traceback, logging
from random import Random
from utils import Utilities as utils
from tcontext import ModelTransformationContext, ExhaustContext
from tconstants import TConstants as TC
from compiler import ModelAndRuleCompiler
from pytcore.core.himesis import HConstants as HC
from pytcore.rules.ndarule import NDARule
from pytcore.tcore.messages import Packet
import cProfile, pstats, StringIO

''' hergin :: motif-integration start '''
from motifcontext import MotifContext
from tcorecontext import TCoreContext
from pytcore.tcore.messages import Pivots
''' hergin :: motif-integration end '''

import igraph as ig
#import pydot
import datetime
from random import *
from threading import *
from Queue import *
from barrier import *
from synchgraph import *
from itertools import *
#from petrinet import *
from _abcoll import Iterable
from time import *
import timeit
from pprint import isreadable
from math import *
import os

'''
	py-t-core abstraction layer

	_lock						used to synchronize access to self._changelogs
	_changelogs					used to hold asworker changelogs
	_M							the himesis graph we're transforming
	_mtContexts					the current transformation contexts
	_transfData					a mapping between transformation filenames and
									their data
	_userTransfs				stores the transformations loaded by the user
									(needed to return to the pre-run state after
									running)
	_execmode					the current execution mode
	_debugOn					indicates whether or not debugging is enabled
	_debugProgrammedBreak		a blocking flag used to freeze the execution of a
									transformation until the user explicitly resumes,
									at which point we continue precisely where we left
									off
	_mtContexts2debugClients	used to map ModelTransformationContexts to their
									associated atompm debugging window, if any
	_aswCommTools				bundle of properties and functions that enable and
									facilitate sending requests to our parent
									mtworker's asworker
	_aswNextSequenceNumber		used to determine if a changelog is received out
									of order, and if a pending changelog is now ready
									to be handled
	username					the user's username
	defaultDCL					the user's preferred designer code language '''
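
# Illustrative driving sequence (not part of AToMPM itself): an mtworker would
# typically construct this layer with its asworker communication tools, feed it
# a model and some transformation files, and then start execution. The
# aswCommTools keys follow their use in this file; everything else below
# (user name, file path) is hypothetical.
#
#	ptcal = PyTCoreAbstractionLayer({'httpReq':httpReq, 'wid':wid}, mtwid)
#	ptcal.username = 'someUser'                          # hypothetical user
#	ptcal.loadModel(m, mms, sn)                          # compile model + metamodels once
#	ptcal.loadTransforms(['Transformations/T.model'])    # hypothetical path
#	ptcal.play()                                         # or .step() / .pause() / .stop()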

class PyTCoreAbstractionLayer :
	def __init__(self,aswCommTools,mtwid) :
		self._lock = threading.Condition()
		self._changelogs = []
		self._M = None

		''' hergin :: motif-integration start '''
		self.packet = None
		self.globalDeltas = []
		self.incUpdates = True
		self.sendDeltas = True
		''' hergin :: motif-integration end '''

		self._mtContexts = []
		self._transfData = {}
		self._userTransfs = []
		self._execmode = 'STOPPED'
		self._debugOn = False
		self._debugProgrammedBreak = threading.Event()
		self._debugProgrammedBreak.set()
		self._mtContexts2debugClients = {}
		self._aswCommTools = aswCommTools
		self._aswNextSequenceNumber = None
		self.username = None
		self.defaultDCL = TC.PYTHON
		self._mtwid = mtwid

		''' used only in COMP 522 and COMP 621:
			Petri Net modules, i.e., the decomposition of the disconnected _M graph '''
		self.modules = {}

		''' synchronization graph for modular state spaces '''
		self.SG = None
		self.modStart = None
		self.modEnd = None
		self.flatStart = None
		self.flatEnd = None

		''' state spaces for individual modules '''
		self.modIDtoPOS = {}
		self.SSs = {}

		# enactment vars
		self.bdapiQueue = Queue()
		# enactment: if this is True, OpenModel appends the model to the canvas; otherwise it reloads it
		self.loadedModel = False
		# enactment: set of formalisms we load automatically in order to do the cleanups
		self._loadedMMs = set()

	def processQuery(self,query):
		result = ""
		query = query['query'].replace('"',"'")
		qValue = query.split('(')[0].strip()
		if qValue == "getCount":
			typ = query.split("'")[1].strip()
			def f(e):
				return e['$ft__'].endswith(typ)
			result = "Number of '" + typ + "' in the resulting graph: " + str(len(filter(f,self.packet.graph.vs)))
			# NOTE: self.packet.graph.vs.select(tokens_eq=3) would be nicer, but attribute names starting with '$' prevent it
		elif qValue == "toggleSendDelta":
			self.sendDeltas = not self.sendDeltas
			if self.sendDeltas:
				result = "Deltas will be sent at the end!"
			else:
				result = "Deltas won't be sent at the end!"
		elif qValue == "toggleIncUpdate":
			self.incUpdates = not self.incUpdates
			if self.incUpdates:
				result = "Incremental updates are on!"
			else:
				result = "Incremental updates are off!"
		self._aswPrintReq(result)

	''' queue a BDAPI response received from the client '''
	def _queueBDAPI(self,resp):
		self.bdapiQueue.put(resp)

	''' send a PUT /GET/console request to the asworker '''
	def _aswPrintReq(self,msg) :
		return self._aswCommTools['httpReq']('PUT','/GET/console',{'text':msg})

	'''
		do 'callback()' once we've received the feedback for the last step that
		ran, if any, checking for the said feedback and handling any newly
		received feedback every TC.WAIT_ON_CHLOG_DELAY seconds '''
	def _doWhenLastStepFeedbackReceived(self,callback) :
		if self.incUpdates:
			def condition() :
				self._handleChangelogs()
				return self._mtContexts[-1].isLastStepFeedbackReceived()
			utils.doWhen(
				condition,
				TC.WAIT_ON_CHLOG_DELAY,
				callback)
		else:
			callback()
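
	# For reference, a minimal sketch of the polling behaviour the description above
	# implies (utils.doWhen lives in utils.py; this re-implementation is an assumption
	# about its contract, not the actual helper):
	#
	#	def doWhen(condition, delay, callback):
	#		def poll():
	#			if condition():
	#				callback()
	#			else:
	#				threading.Timer(delay, poll).start()
	#		poll()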

	'''
		synchronously check whether any changelogs have been received and, if we are
		ready to handle them, remove them from self._changelogs and handle them

		1. sort changelogs by ascending sequence#
		2. do nothing if self._changelogs is empty, if no model has been loaded
			yet (i.e., self._aswNextSequenceNumber == None), or if the oldest
			changelog is still too new to be handled
		3. crash if we encounter an invalid sequence#
		4. otherwise (we're ready to handle the oldest changelog), handle the
			oldest changelog, increment self._aswNextSequenceNumber and recurse
			back to step 1

		TBI: performance would benefit greatly from caches that map atompm ids to
			GUIDs '''
	def _handleChangelogs(self) :
		'''
			handle a single changelog '''
		def _handleChangelog(changelog) :
			def eq(a,b) : return str(a) == str(b)
			for c in changelog :
				if c['op'] == 'MKEDGE' :
					node1 = \
						self._M.vs.select(lambda v : eq(v['$atompmId'],c['id1']))[0]
					node2 = \
						self._M.vs.select(lambda v : eq(v['$atompmId'],c['id2']))[0]
					self._M.add_edges((node1.index, node2.index))
				elif c['op'] == 'RMEDGE' :
					pass
				elif c['op'] == 'MKNODE' :
					self._compiler.addNode(self._M, json.loads(c['node']), c['id'])
				elif c['op'] == 'RMNODE' :
					node = \
						self._M.vs.select(lambda v : eq(v['$atompmId'],c['id']))[0]
					self._M.delete_nodes([node.index])
				elif c['op'] == 'CHATTR' :
					node = \
						self._M.vs.select(lambda v : eq(v['$atompmId'],c['id']))[0]
					self._M.vs[node.index][c['attr']] = c['new_val']
				elif c['op'] == 'LOADMM' :
					self._compiler.parseMetamodel(
						c['name'],
						utils.fread(
							'/users/%s/%s.metamodel'%(self.username,c['name'])),
						loadMM=True)
				elif c['op'] == 'DUMPMM' :
					self._compiler.unloadMetamodel(c['name'])
				elif c['op'] == 'RESETM' :
					self._M = self._compiler.compileModel(c['new_model'])
					self._M.mmTypeData = self._compiler.getMMTypeData()
				elif c['op'] == 'SYSOUT' :
					''' hergin :: motif-integration :: modify :: added startswith checks '''
					if c['text'].startswith(TC.RULE_SUCCESS_MSG) or \
						c['text'].startswith(TC.RULE_NOT_APPLICABLE_MSG) or \
						c['text'].startswith(TC.RULE_FAILURE_MSG) or \
						c['text'].startswith(TC.TRANSFORMATION_DONE) or \
						c['text'].startswith(TC.REMOTE_APPLICATION_FAILURE) :
						self._mtContexts[-1].setLastStepFeedbackReceived()

		self._lock.acquire()
		self._changelogs.sort(key=lambda c : utils.sn2int(c['sequence#']))
		if len(self._changelogs) == 0 or \
			self._aswNextSequenceNumber == None or \
			utils.sn2int(self._changelogs[0]['sequence#']) > \
			utils.sn2int(self._aswNextSequenceNumber) :
			self._lock.release()
		else :
			sn = self._changelogs[0]['sequence#']
			if utils.sn2int(sn) < utils.sn2int(self._aswNextSequenceNumber) :
				raise ValueError('invalid changelog sequence# :: '+sn)
			else :
				logging.debug('++ ('+sn+') '+str(self._changelogs[0]['changelog']))
				_handleChangelog(self._changelogs.pop(0)['changelog'])
				self._aswNextSequenceNumber = \
					utils.incrementSequenceNumber(self._aswNextSequenceNumber)
			self._lock.release()
			self._handleChangelogs()
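
	# For illustration only (shapes inferred from the handlers above, not a spec):
	# a changelog bundle received from the asworker looks roughly like
	#
	#	{'sequence#': '...',
	#	 'changelog': [
	#		{'op':'MKNODE', 'id':'12', 'node':'{...json of the new node...}'},
	#		{'op':'CHATTR', 'id':'12', 'attr':'name', 'new_val':'p1'},
	#		{'op':'MKEDGE', 'id1':'12', 'id2':'13'},
	#		{'op':'SYSOUT', 'text':'...'}]}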

	'''
		load a model (and its metamodels)

		1. compile the provided model into a himesis graph and save it to self._M
		2. synchronize self._M's mmTypeData with that of self._compiler's so that
			it gets updated as new metamodels are loaded
		3. initialize self._aswNextSequenceNumber based on the sequence# 'sn' of
			the provided model, and forget any already received out-of-date
			changelogs

		NOTE: this function should only get called once (when the asworker
				initially sets up this mtworker) '''
	def loadModel(self,m,mms,sn) :
		assert self._M == None, 'ptcal.loadModel() should only be called once'
		self._compiler = ModelAndRuleCompiler(
			self.username,
			self._aswCommTools['wid'],
			self.defaultDCL,
			self._mtwid)
		self._M = self._compiler.compileModel(m,mmsData=mms)
		self._M.mmTypeData = self._compiler.getMMTypeData()

		''' hergin :: motif-integration start '''
		self.packet = Packet(self._M)
		''' hergin :: motif-integration end '''

		self._aswNextSequenceNumber = utils.incrementSequenceNumber(sn)
		self._lock.acquire()
		self._changelogs = \
			filter(lambda c : utils.sn2int(sn) < utils.sn2int(c['sequence#']),
					self._changelogs)
		self._lock.release()

	'''
		load a PN model (and its metamodels)

		1. compile the provided model into a himesis graph and save it to self._M
		2. synchronize self._M's mmTypeData with that of self._compiler's so that
			it gets updated as new metamodels are loaded
		3. initialize self._aswNextSequenceNumber based on the sequence# 'sn' of
			the provided model, and forget any already received out-of-date
			changelogs

		NOTE: this function should only get called once (when the asworker
				initially sets up this mtworker) '''
	def loadModelPN(self,m,mms,sn) :
		assert self._M == None, 'ptcal.loadModelPN() should only be called once'
		self._compiler = ModelAndRuleCompiler(self.username)
		self._M = self._compiler.compileModelPN(m,mmsData=mms)
		disjoint = self._M.decompose(mode=ig.WEAK)
		# create a dictionary of modules
		for mod in disjoint:
			self.modules[uuid.uuid4()] = mod
		self._M.mmTypeData = self._compiler.getMMTypeData()
		self._aswNextSequenceNumber = utils.incrementSequenceNumber(sn)
		self._lock.acquire()
		self._changelogs = \
			filter(lambda c : utils.sn2int(sn) < utils.sn2int(c['sequence#']),
					self._changelogs)
		self._lock.release()

	''' setup internal state to reflect the given runtime configuration '''
	def _loadRuntimeConfiguration(self,rc) :
		if 'looseSubtypingMM' in rc :
			self._compiler.RC__looseSubtypingMM = rc['looseSubtypingMM']

	'''
		read in some json that describes a model transformation from file 'fname',
		store it in self._transfData, and push a new ModelTransformationContext onto
		self._mtContexts '''
	def _loadTransform(self,fname) :
		if fname not in self._transfData :
			self._transfData[fname] = \
				utils.fread('/users/%s/%s'%(self.username,fname))

		''' hergin :: motif-integration start '''
		if TC.MOTIFMM in self._transfData[fname]['metamodels']:
			self._mtContexts.append(MotifContext(fname,self))
		elif TC.TCOREMM in self._transfData[fname]['metamodels']:
			self._mtContexts.append(TCoreContext(fname,self))
		elif TC.TRANSFMM in self._transfData[fname]['metamodels']:
			self._mtContexts.append(ModelTransformationContext(self._transfData[fname],fname))
		''' hergin :: motif-integration end '''

	'''
		load a set of user-specified transformations and forget anything
		previously loaded

		1. forget previously loaded transformations and compiled rules
		2. reset 'session' area
		3. load transformations
		4. remember loaded transformations '''
	def loadTransforms(self,fnames) :
		self._transfData = {}
		self._mtContexts = []
		self._compiler.forgetCompiledRules()
		self._M.session = {}
		for fname in fnames :
			self._loadTransform(fname)
		self._userTransfs = fnames

	'''
		returns the filename and compiled form of the next rule to run

		1. fetch the current transformation context
		2. retrieve and load runtime configuration options, if any
		3. if transformation debugging is enabled and we're entering a
			ModelTransformationContext for the first time, we
			a) remember self._execmode and set self._execmode to PAUSE
			b) notify the user of the impending "programmed debugging pause"
			c) request a new atompm instance loaded with the transformation model
				corresponding to the current transformation context
			d) unset the _debugProgrammedBreak flag and make a blocking call that
				waits for another thread to reset it (via
				self._startDebugProgrammedBreak())... this may happen in a number
				of cases; when it does
				i.   user pressed stop --> self._execmode == 'STOPPING' :
						_nextRule() returns an error which in turn triggers the
						stopping of the current transformation
				ii.  user disabled debugging --> self._execmode == 'PAUSE' :
						self._execmode is restored to its previous value and execution
						continues as planned (i.e., _nextRule() returns with the next
						rule to run)
				iii. user pressed play/step --> self._execmode == 'PLAY/STEP' :
						same behaviour as ii.
		4. call its nextStep() function to get the next transformation step
			a) if the step is not an object (i.e., it is an application code)
				i.  if there is more than 1 loaded transformation context,
					j.   pop the newly completed transformation context
					jj.  if the next context (which may be a "parent" transformation
							or the next of multiple loaded transformations) is a parent,
							update its last step application info
					jjj. make a recursive call to get the next rule to run from
							within the "next" context
				ii. otherwise, simply return the said application code
			b) if the step is an error, return it
			c) otherwise, determine the type of the step (via the node's $type
				attribute or by inspecting the 'metamodels' array in the provided
				.model file)
				i.  Rules get stored in 'nr' to be later returned
				ii. Transformations and Exhausts[Randoms] cause the pushing of a
					new transformation context onto self._mtContexts, and of a
					recursive call to get the next rule to run from within that
					new context
		5. return the rule name and compiled form

		NOTE: in step 4c), while in debug mode, we highlight transformations,
				exhausts and rules before recursing on them or returning them,
				respectively '''
	def _nextRule(self) :
		mtc = self._mtContexts[-1]
		self._loadRuntimeConfiguration(mtc.getRuntimeConfiguration())

		if self._debugOn and not mtc.isTransformationUnderWay() and \
			(type(mtc) == MotifContext or type(mtc) == TCoreContext) : # hergin :: motif-integration modify
			_execmode = self._execmode
			self._execmode = 'PAUSE'
			self._aswPrintReq(TC.DEBUGGING_HALT)
			self._requestClientDebugWindow(mtc.fname)
			self._startDebugProgrammedBreak()
			if self._execmode == 'STOPPING' :
				return {'$err':'transformation stopped during debugging pause'}
			elif self._execmode == 'PAUSE' or \
				self._execmode == 'PLAY' or \
				self._execmode == 'STEP' :
				self._execmode = _execmode

		self.bdapiQueue = Queue()
		ns = mtc.nextStep()
		if ns.__class__ != {}.__class__ :
			if len(self._mtContexts) > 1 :
				self._mtContexts = self._mtContexts[:-1]
				if self._mtContexts[-1].isTransformationUnderWay() :
					self._mtContexts[-1].setLastStepApplicationInfo(ns)
				return self._nextRule()
			else :
				return ns
		elif '$err' in ns :
			return ns['$err']
		else :
			def highlightUpcomingStep() :
				for _mtc in reversed(self._mtContexts) :
					if id(_mtc) in self._mtContexts2debugClients :
						debugClient = self._mtContexts2debugClients[id(_mtc)]
						self._requestNodeHighlight(
							debugClient['host'],
							debugClient['aswid'],
							_mtc.getCurrentStepId())
						break

			if 'id' in ns :
				fulltype = mtc.t['nodes'][ns['id']]['$type']
				# hergin :: motif-integration start
				if fulltype == mtc.metamodel+"/CRule":
					if self._debugOn :
						highlightUpcomingStep()
					self._loadTransform(ns['rule'])
					return self._nextRule()
				elif fulltype.startswith(TC.TCOREMM) or \
					fulltype.startswith(TC.MOTIFMM):
					if self._debugOn :
						highlightUpcomingStep()
					return ns
				# hergin :: motif-integration end
				elif fulltype == TC.TRANSFMM+'/Rule' :
					if self._debugOn :
						highlightUpcomingStep()
					return {'fname':ns['fname'],
							'cr':self._compiler.compileRule(None,ns['fname'])}
				# Enactment OpenModel blob; pathToFormalism is present in the MM but not used here:
				# the functionality for opening a window with formalisms is in WriteModel.
				# pathToFormalism should be removed from the MM for OpenModel (it was not removed due to
				# language evolution).
				elif fulltype == TC.TRANSFMM+'/OpenModel' :
					if self._debugOn :
						highlightUpcomingStep()
					fname = mtc.t['nodes'][ns['id']]['pathToModel']['value']
					#formalism = mtc.t['nodes'][ns['id']]['pathToFormalism']['value']
					formalism = ""
					return {'fname':fname,'formalism':formalism,'rtype':'OpenModel'}
				# Enactment WriteModel blob
				elif fulltype == TC.TRANSFMM+'/WriteModel' :
					if self._debugOn :
						highlightUpcomingStep()
					fname = mtc.t['nodes'][ns['id']]['pathToModel']['value']
					formalism = mtc.t['nodes'][ns['id']]['pathToFormalism']['value']
					return {'fname':fname,'formalism':formalism,'rtype':'WriteModel'}
				elif fulltype == TC.TRANSFMM+'/Transformation' :
					if self._debugOn :
						highlightUpcomingStep()
					self._loadTransform(ns['fname'])
					return self._nextRule()
				elif fulltype == TC.TRANSFMM+'/Exhaust' :
					self._mtContexts.append( ExhaustContext(mtc.t,ns['id']) )
					return self._nextRule()
				elif fulltype == TC.TRANSFMM+'/ExhaustRandom' :
					self._mtContexts.append( ExhaustContext(mtc.t,ns['id'],self._randomGen) )
					return self._nextRule()
			else :
				''' hergin :: motif-integration start '''
				if 'trafoResult' in ns:
					return ns
				''' hergin :: motif-integration end '''
				contents = utils.fread('/users/%s/%s'%(self.username,ns['fname']))
				if self._debugOn :
					highlightUpcomingStep()
				if TC.RULEMM in contents['metamodels'] :
					return {'fname':ns['fname'],
							'cr':self._compiler.compileRule(contents,ns['fname'])}
				elif TC.TRANSFMM in contents['metamodels'] :
					self._transfData[ns['fname']] = contents
					self._loadTransform(ns['fname'])
					return self._nextRule()
				raise ValueError(\
					'file does not contain valid rule or transformation '+\
					'model :: '+ns['fname'])

	''' Enactment: do the OpenModel magic '''
	def runOpenModelRule(self, fname="",formalism=""):
		unload = ""
		if not fname:
			return (None,TC.FAILED)
		else:
			if not formalism:
				self._aswPrintReq('auto loading model :: '+fname)
				try:
					with open(os.getcwd()+'/users/'+self.username+fname) as f:
						pass
				except IOError as e:
					self._aswPrintReq('failed opening a file :: '+fname)
					return (None,TC.FAILED)
				if not self.loadedModel:
					method = '_loadModelForTransform'
					if len(self._loadedMMs) == 0:
						self._loadedMMs = self._compiler._loadedMMs.copy()
					diff = self._compiler._loadedMMs.difference(self._loadedMMs)
					for u in diff:
						unload += u+'.defaultIcons.metamodel,'
				else:
					method = '_appendModelForTransform'
				resp = self._aswCommTools['httpReq'](
					'PUT',
					'/GET/console',
					{'text':'CLIENT_BDAPI :: '+
							'{"func":"'+method+'",'+
							' "args":'+
								'{"fname":"'+fname+'",'+
								'"unload":"'+unload+'",'+
								' "callback-url":"/__mt/bdapiresp?wid='+
								self._aswCommTools['wid']+'"}}'})
				resp = self.bdapiQueue.get(block=True,timeout=5000)
				if not resp['resp'] == 'ok':
					return (None,TC.FAILED)
				else:
					if not self.loadedModel:
						self.loadedModel = True
					return (None,TC.SUCCEEDED)
			# Was used to open a new window with loaded formalisms and pause the transform;
			# this functionality is now in WriteModel.
			else:
				pass
				# Keep for now....
				# self._aswPrintReq('pausing transform')
				# self._execmode = 'PAUSE'
				# self._aswPrintReq('opening new window for manual step:: '+fname)
				# try:
				#	with open(os.getcwd()+'/users/'+self.username+fname) as f:
				#		exists = 'true'
				# except IOError as e:
				#	exists = 'false'
				# resp = self._aswCommTools['httpReq'](
				#	'PUT',
				#	'/GET/console',
				#	{'text':'CLIENT_BDAPI :: '+
				#			'{"func":"_createEmptyModelInNewWindow",'+
				#			' "args":'+
				#				'{"fname":"'+fname+'","exists":"'+exists+'",'+'"formalism":"'+formalism+'",'
				#				' "callback-url":"/__mt/bdapiresp?wid='+
				#				self._aswCommTools['wid']+'"}}'})
				# self.loadedModel = False
				# return (None,TC.SUCCEEDED)

	''' Enactment: do the WriteModel magic '''
	def runWriteModelRule(self,fname="",formalism=""):
		if not fname:
			# this makes the next OpenModel call load the model instead of appending it;
			# basically, use this trick to clean up the canvas and get a new model:
			# first place a WriteModel blob without an fname, followed by an OpenModel
			self.loadedModel = False
			return (None,TC.SUCCEEDED)
		else:
			# no formalism specified: save the model
			if not formalism:
				self._aswPrintReq('auto saving model :: '+fname)
				resp = self._aswCommTools['httpReq'](
					'PUT',
					'/GET/console',
					{'text':'CLIENT_BDAPI :: '+
							'{"func":"_writeModelAfterTransform",'+
							' "args":'+
								'{"fname":"'+fname+'",'+
								' "callback-url":"/__mt/bdapiresp?wid='+
								self._aswCommTools['wid']+'"}}'})
				# need to wait for the model to load
				resp = self.bdapiQueue.get(block=True,timeout=5000)
				if resp['resp'] == 'ok':
					self.loadedModel = False
					return (None,TC.SUCCEEDED)
				else:
					return (None,TC.FAILED)
			# formalism specified: open a new window with the loaded formalism and/or model
			else:
				self._aswPrintReq('pausing transform')
				self._execmode = 'PAUSE'
				self._aswPrintReq('opening new window for manual step:: '+fname)
				try:
					with open(os.getcwd()+'/users/'+self.username+fname) as f:
						# open the existing model
						exists = 'true'
				except IOError as e:
					# or save the model with the fname provided
					exists = 'false'
				resp = self._aswCommTools['httpReq'](
					'PUT',
					'/GET/console',
					{'text':'CLIENT_BDAPI :: '+
							'{"func":"_createEmptyModelInNewWindow",'+
							' "args":'+
								'{"fname":"'+fname+'","exists":"'+exists+'",'+'"formalism":"'+formalism+'",'
								' "callback-url":"/__mt/bdapiresp?wid='+
								self._aswCommTools['wid']+'"}}'})
				self.loadedModel = False
				return (None,TC.SUCCEEDED)

	'''
		synchronously save 1 changelog into self._changelogs '''
	def onchangelog(self,c) :
		self._lock.acquire()
		self._changelogs.append(c)
		self._lock.release()

	'''
		causes the execution of the current transformation(s) to pause (by
		preventing _play()'s next call to _step(), if any) '''
	def pause(self) :
		self._execmode = 'PAUSE'
		if not self.incUpdates:
			req = self.buildEditHttpReq(self.globalDeltas)
			self.globalDeltas = []
			resp = self._aswCommTools['httpReq']('POST','/batchEdit',req)
			if not utils.isHttpSuccessCode(resp['statusCode']) :
				self.stop()
				self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
				return
			self._handleChangelogs()

	'''
		play()
			calls _play() if it isn't already running (i.e., if we're already in PLAY
			mode, a timed call to _play() has already been placed) and if there isn't
			already another thread currently paused on _debugProgrammedBreak... if
			there is such a thread, it is unpaused (via _stopDebugProgrammedBreak())
			and the current thread terminates immediately

		_play()
			schedules an action for when feedback from the last step is received...
			the action is
			1. return if we're no longer in PLAY mode
			2. take one _step()
			3. schedule a recursive call to _play() in TC.INTER_RULE_DELAY
				seconds '''
	def play(self) :
		if self._execmode == 'STOPPED':
			self._randomGen = Random(0)
		if self._execmode != 'PLAY' :
			self._execmode = 'PLAY'
			if not self._stopDebugProgrammedBreak() :
				self._play()

	def _play(self) :
		if self.incUpdates:
			self._doWhenLastStepFeedbackReceived(
				lambda : self._execmode == 'PLAY' and \
							self._step() and \
							utils.setTimeout(TC.INTER_RULE_DELAY,self._play))
		else:
			self._doWhenLastStepFeedbackReceived(
				lambda : self._execmode == 'PLAY' and \
							self._step() and \
							self._play())

	'''
		associate the newly created debugging window described by 'clientInfo'
		to the ModelTransformationContext that requested its creation '''
	def registerDebugClient(self,clientInfo) :
		clientInfo = json.loads(clientInfo)
		for mtc in reversed(self._mtContexts) :
			if hasattr(mtc,'fname') and mtc.fname == clientInfo['fname'] :
				self._mtContexts2debugClients[id(mtc)] = clientInfo

	'''
		request a new atompm client via the client backdoor API... the new client
		will be loaded with the specified model, and sufficient information to
		identify and communicate with the client will be POSTed to the callback
		url '''
	def _requestClientDebugWindow(self,fname) :
		return self._aswCommTools['httpReq'](
			'PUT',
			'/GET/console',
			{'text':'CLIENT_BDAPI :: '+
					'{"func":"_loadModelInNewWindow",'+
					' "args":'+
						'{"fname":"'+fname+'",'+
						' "callback-url":"/__mt/debugClient?wid='+
						self._aswCommTools['wid']+'"}}'})

	'''
		request that the specified node from the specified atompm instance be
		highlighted '''
	def _requestNodeHighlight(self,host,aswid,asid,timeout=5000) :
		return utils.httpReq(
			'PUT',
			host,
			'/GET/console?wid='+aswid,
			{'text':'CLIENT_BDAPI :: '+
					'{"func":"_highlight",'+
					' "args":'+
						'{"asid":"'+asid+'",'+
						' "timeout":'+str(timeout)+'}}'})

	''' hergin :: motif-integration :: START :: moved this out of the step function '''
	''' (also added self) '''
	'''
		go through a rule's deltas and (1) produce a batchEdit request, and
		(2) undo them

		NOTE: we undo the deltas for 2 reasons
				1. so that changes become driven by asworker changelogs (like in
					csworkers)
				2. so that we don't need to figure out which entries in received
					changelogs correspond to user operations and which ones
					correspond to the effects of the constructed batchEdit

		NOTE: since we sometimes need to use the result of one request as the
				parameter of another (i.e., create a node, then update *it*), we use
				the 'mknodes' map to remember which requests created which new
				nodes... this also lets us know which nodes already exist and
				which ones were created by the last rule

		NOTE: because the creation of connector nodes and their linking to their
				ends is described in non-contiguous deltas, we use the 'mknodes'
				map to remember incomplete connector creation requests until the
				appropriate MKEDGE deltas are encountered '''
	def buildEditHttpReq(self,deltas) :
		reqs = []
		mknodes = {}
		neighborhood = None

		'''
			construct an atompmId given a node... the result will be
			a) a 'macro' to be replaced by the result of an earlier request
				within the batchEdit, if the node was created by the last rule
			b) the atompmId stored within the node, if the node already has
				a counterpart in atompm '''
		def atompmInstanceId(node) :
			if node[HC.GUID] in mknodes :
				return '$'+str(mknodes[node[HC.GUID]])+'$'
			else :
				return node['$atompmId']

		for d in deltas :
			if d['op'] == 'RMNODE' :
				reqs.append({\
					'method':'DELETE',
					'uri':d['attrs'][HC.FULLTYPE]+'/'+\
							d['attrs']['$atompmId']+'.instance'})
			elif d['op'] == 'MKNODE' :
				mknodes[d['guid']] = len(reqs)
				node = self._M.vs[self._M.get_node(d['guid'])]
				if neighborhood == None :
					neighborhood = map(
						lambda n: n[HC.FULLTYPE]+'/'+n['$atompmId']+'.instance',
						d['neighborhood'])
				if node[HC.CONNECTOR_TYPE] :
					reqs.append({\
						'method':'POST',
						'uri':node[HC.FULLTYPE]+'.type',
						'reqData':
							{'src':None,
							 'dest':None,
							 'hitchhiker':
								{'segments':None,
								 'asSrc':None,
								 'asDest':None,
								 'neighborhood':neighborhood}}})
				else :
					reqs.append({\
						'method':'POST',
						'uri':node[HC.FULLTYPE]+'.type',
						'reqData':{'hitchhiker':{'neighborhood':neighborhood}}})
			elif d['op'] == 'RMEDGE' :
				pass
			elif d['op'] == 'MKEDGE' :
				def isConnectorMKNODE(req):
					return 'dest' in req['reqData']
				if d['guid1'] in mknodes :
					req = reqs[ mknodes[d['guid1']] ]
					if isConnectorMKNODE(req) :
						node2 = self._M.vs[self._M.get_node(d['guid2'])]
						id = atompmInstanceId(node2)
						req['reqData']['dest'] = \
							req['reqData']['hitchhiker']['asDest'] = \
								node2[HC.FULLTYPE]+'/'+id+'.instance'
				if d['guid2'] in mknodes :
					req = reqs[ mknodes[d['guid2']] ]
					if isConnectorMKNODE(req) :
						node1 = self._M.vs[self._M.get_node(d['guid1'])]
						id = atompmInstanceId(node1)
						req['reqData']['src'] = \
							req['reqData']['hitchhiker']['asSrc'] = \
								node1[HC.FULLTYPE]+'/'+id+'.instance'
			elif d['op'] == 'CHATTR' :
				node = self._M.vs[self._M.get_node(d['guid'])]
				id = atompmInstanceId(node)
				reqs.append({\
					'method':'PUT',
					'uri':node[HC.FULLTYPE]+'/'+id+'.instance',
					'reqData':{'changes':{d['attr']:d['new_val']}}})
			elif d['op'] == 'LOADMM' :
				reqs.append({\
					'method':'PUT',
					'uri':'/current.metamodels',
					'reqData':
						{'mm':'/%s%s.metamodel'%(self.username,d['name'])}})

		if self.incUpdates:
			for d in reversed(deltas) :
				if d['op'] == 'RMNODE' :
					newNodeIndex = self._M.add_node(newNodeGuid=d['attrs'][HC.GUID])
					for attr,val in d['attrs'].iteritems() :
						self._M.vs[newNodeIndex][attr] = val
				elif d['op'] == 'MKNODE' :
					node = self._M.vs[self._M.get_node(d['guid'])]
					self._M.delete_nodes([node.index])
				elif d['op'] == 'RMEDGE' :
					node1 = self._M.vs[self._M.get_node(d['guid1'])]
					node2 = self._M.vs[self._M.get_node(d['guid2'])]
					self._M.add_edges((node1.index, node2.index))
				elif d['op'] == 'MKEDGE' :
					pass
				elif d['op'] == 'CHATTR' :
					node = self._M.vs[self._M.get_node(d['guid'])]
					node[d['attr']] = d['old_val']
				elif d['op'] == 'LOADMM' :
					pass

		''' hergin :: motif-integration modify: succeeded rule name + time '''
		#reqs.append({\
		#	'method':'PUT',
		#	'uri':'/GET/console',
		#	'reqData':{'text':TC.RULE_SUCCESS_MSG+" ("+self._mtContexts[-1]._lastStep['alias']+":"+self._mtContexts[-1]._lastStep['name']+") in "+str(self._mtContexts[-1]._lastStep['time'])}})
		#	'reqData':{'text':TC.RULE_SUCCESS_MSG}})
		return reqs
	''' hergin :: motif-integration :: END :: moved this out of the step function '''
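
	# For illustration only (shapes inferred from buildEditHttpReq above, not a spec):
	# a CHATTR delta on an already existing node, such as
	#
	#	{'op':'CHATTR', 'guid':..., 'attr':'name', 'new_val':'p1', 'old_val':'p0'}
	#
	# becomes one entry of the POSTed /batchEdit payload, roughly
	#
	#	{'method':'PUT',
	#	 'uri':'SomeMM/SomeType/<atompmId>.instance',   # hypothetical type name
	#	 'reqData':{'changes':{'name':'p1'}}}
	#
	# while the in-memory graph is rolled back to 'old_val' so the change is
	# re-applied only via the asworker changelog.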

	'''
		author: hergin
		sendAndApplyDelta()
			if incremental updates are on:
				send and apply the given deltas to the model and the UI instance
			else:
				collect the deltas in globalDeltas, to be handled later '''
	def sendAndApplyDelta(self,deltas):
		if self.incUpdates:
			req = self.buildEditHttpReq(deltas)
			resp = self._aswCommTools['httpReq']('POST','/batchEdit',req)
			if not utils.isHttpSuccessCode(resp['statusCode']) :
				self.stop()
				self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
				return
			self._handleChangelogs()
		else:
			self.globalDeltas.extend(deltas)
		self.packet.deltas = []

	'''
		step()
			wrapper around _step() that ensures that step requests from the user are
			ignored when in PLAY mode, and that valid requests only go through (i.e.,
			actually call _step()) when feedback for the last step is received...
			moreover, as is the case for play(), if there is already another thread
			currently paused on _debugProgrammedBreak, it is unpaused (via
			_stopDebugProgrammedBreak()) and the current thread terminates immediately

		_step()
			fetch and run the next rule
			1. fetch the next rule
				a) if the next rule is not a {} (i.e., all available transformations have
					terminated and _nextRule() returned the resulting application code),
					report the application code and stop()
				b) if an error is returned, report it and stop()
				c) otherwise,
					i.   run the rule (returns (deltas|error,applicationInfo))
					ii.  set the ran rule's application info
					iii. if the rule was n/a, report this
					iv.  if the rule failed, report this and the error
					v.   otherwise,
						j.   construct a batchEdit operation based on the rule's effects,
								and undo the said effects
						jj.  send off the batchEdit
						jjj. stop() if the batchEdit fails

		NOTE: this function assumes that feedback for the last step has already
				been received '''
	def step(self) :
		if self._execmode == 'PLAY' :
			pass
		else :
			if self._execmode == 'STOPPED':
				self._randomGen = Random(0)
			self._execmode = 'STEP'
			if not self._stopDebugProgrammedBreak() :
				self._doWhenLastStepFeedbackReceived(self._step)

	def _step(self) :
		'''
			run the specified rule and return a tuple describing its execution '''
		def runRule(r) :
			''' hergin :: motif-integration start '''
			#self._aswPrintReq('launching rule :: '+r['fname'])
			#ar = NDARule(r['cr']['lhs'],r['cr']['rhs'],rng=self._randomGen)
			mtc = self._mtContexts[-1]
			if mtc.metamodel == TC.MOTIFMM or mtc.metamodel == TC.TCOREMM:
				ar = r['rule']
			else:
				ar = NDARule(r['cr']['lhs'],r['cr']['rhs'],rng=self._randomGen,sendAndApplyDeltaFunc=self.sendAndApplyDelta)
			if mtc.nextInput == "packetIn":
				startTime = clock()
				#pr = cProfile.Profile()
				#pr.enable()
				self.packet = ar.packet_in(self.packet)
				#pr.disable()
				#s = StringIO.StringIO()
				#sortby = 'cumulative'
				#ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
				#ps.print_stats()
				#print s.getvalue()
				mtc.setLastStepExecTime(clock()-startTime)
			elif mtc.nextInput == "nextIn":
				startTime = clock()
				self.packet = ar.next_in(self.packet)
				mtc.setLastStepExecTime(clock()-startTime)
			elif mtc.nextInput == "cancelIn":
				startTime = clock()
				self.packet = ar.cancelIn(self.packet)
				mtc.setLastStepExecTime(clock()-startTime)
			elif mtc.nextInput == "successIn":
				startTime = clock()
				self.packet = ar.success_in(self.packet)
				mtc.setLastStepExecTime(clock()-startTime)
			''' hergin :: motif-integration end '''
			if ar.is_success :
				return (self.packet.deltas,TC.SUCCEEDED)
			elif not ar.is_success :
				''' hergin :: motif-integration start (some terminology fixed) '''
				if ar.exception :
					return (str(ar.exception),TC.EXCEPTION)
				else :
					return (None,TC.FAILED)
			''' hergin :: motif-integration end '''
		try :
			nr = self._nextRule()
		except Exception :
			nr = {'$err':traceback.format_exc()}

		''' hergin :: motif-integration start :: TRAFO RESULT: in case of a CRule_end, pop it from the context and continue with the rest '''
		while 'trafoResult' in nr:
			if len(self._mtContexts) == 1:
				if not self.incUpdates and self.sendDeltas:
					''' hergin :: TO BE MODIFIED - release mode will change '''
					req = self.buildEditHttpReq(self.globalDeltas)
					self.globalDeltas = []
					resp = self._aswCommTools['httpReq']('POST','/batchEdit',req)
					if not utils.isHttpSuccessCode(resp['statusCode']) :
						self.stop()
						self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
						return
					self._handleChangelogs()
				self._aswPrintReq(TC.TRANSFORMATION_DONE+nr['trafoResult']+" in "+str(self._mtContexts[-1].totalExecutionTime)+" seconds")
				self.stop()
				return
			else:
				prevTrafo = self._mtContexts.pop()
				self._mtContexts[-1].setLastStepExecTime(prevTrafo.totalExecutionTime)
				self._mtContexts[-1].setLastStepApplicationInfo(nr['trafoResult'])
				try :
					nr = self._nextRule()
				except Exception :
					nr = {'$err':traceback.format_exc()}

		if nr.__class__ != {}.__class__ :
			self._aswPrintReq(TC.TRANSFORMATION_DONE + nr)
			self.stop()
			return
		elif '$err' in nr :
			self._aswPrintReq(TC.NO_NEXT_RULE+nr['$err'])
			self._stop()
			return
		else :
			if 'rtype' in nr:
				type = nr['rtype']
				if type == 'OpenModel':
					(res,ai) = self.runOpenModelRule(nr['fname'],nr['formalism'])
				elif type == 'WriteModel':
					(res,ai) = self.runWriteModelRule(nr['fname'],nr['formalism'])
				self._mtContexts[-1].setLastStepApplicationInfo(ai)
				self._mtContexts[-1].setLastStepFeedbackReceived()
				return True
			else:
				(res,ai) = runRule(nr)
				self._mtContexts[-1].setLastStepApplicationInfo(ai)
				if ai == TC.FAILED and self.incUpdates:
					''' hergin :: motif-integration modify (report which rule did not succeed) '''
					self._aswPrintReq(TC.RULE_FAILURE_MSG+" ("+self._mtContexts[-1]._lastStep['alias']+":"+self._mtContexts[-1]._lastStep['name']+")")
				elif ai == TC.EXCEPTION and self.incUpdates:
					self._aswPrintReq(TC.RULE_EXCEPTION_MSG + res)
				else :
					''' hergin :: motif-integration :: start '''
					mtc = self._mtContexts[-1]
					if self.incUpdates:
						if mtc.metamodel == TC.MOTIFMM or mtc.metamodel == TC.TCOREMM:
							self._aswPrintReq(TC.RULE_SUCCESS_MSG+" ("+self._mtContexts[-1]._lastStep['alias']+":"+self._mtContexts[-1]._lastStep['name']+")")
						else:
							self._aswPrintReq(TC.RULE_SUCCESS_MSG)
					self._mtContexts[-1].setLastStepFeedbackReceived()
					''' hergin :: motif-integration :: end '''
				return True

	'''
		stop()
			sets self._execmode to STOPPING and schedules a call to _stop() for when
			feedback from the last step is received... being in STOPPING mode implies
			that the _play() loop, if any, will be broken, and that incoming user
			requests will be rejected... in the case where there is already another
			thread currently paused on _debugProgrammedBreak, it is unpaused (via
			_stopDebugProgrammedBreak()), which leads to the transformation being
			stopped from within that thread; the current thread terminates immediately

		_stop()
			1. restores self._mtContexts to right after the user loaded his
				transformation(s) (i.e., to before we actually [partially] ran it)
			2. resets self._mtContexts2debugClients
			3. sends a console message to notify the user that the transformation has
				stopped
			4. sets self._execmode to STOPPED (i.e., we're done STOPPING and can
				handle new requests) '''
	def isStopped(self) : return self._execmode == 'STOPPED'
	def isStopping(self) : return self._execmode == 'STOPPING'

	def stop(self) :
		if not self.incUpdates:
			req = self.buildEditHttpReq(self.globalDeltas)
			self.globalDeltas = []
			resp = self._aswCommTools['httpReq']('POST','/batchEdit',req)
			if not utils.isHttpSuccessCode(resp['statusCode']) :
				self.stop()
				self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
				return
			self._handleChangelogs()
		self._execmode = 'STOPPING'
		# used for enactment: prevents the next open from being an append
		self.loadedModel = False
		if not self._stopDebugProgrammedBreak() :
			self._doWhenLastStepFeedbackReceived(self._stop)

	def _stop(self) :
		self._mtContexts = []
		for fname in self._userTransfs :
			self._loadTransform(fname)
		self._mtContexts2debugClients = {}
		self._aswPrintReq(TC.TRANSFORMATION_STOPPED)
		self._execmode = 'STOPPED'

	'''
		enter a "programmed debugging pause" (i.e., unset the _debugProgrammedBreak
		flag and block until another thread resets it) '''
	def _startDebugProgrammedBreak(self) :
		self._debugProgrammedBreak.clear()
		self._debugProgrammedBreak.wait()

	'''
		if the _debugProgrammedBreak flag is not set (this can only happen when
		another thread unset it to enter a programmed debugging pause), set it and
		return True; otherwise do nothing and return False

		NOTE: this function returns True iff there is a thread waiting on
				_debugProgrammedBreak '''
	def _stopDebugProgrammedBreak(self) :
		if not self._debugProgrammedBreak.isSet() :
			self._debugProgrammedBreak.set()
			return True
		return False

	'''
		toggle the _debugOn flag and report the debugger status to atompm... when
		disabling debugging, any current programmed debugging pauses are stopped
		(and execution resumes normally)

		while in DEBUG MODE,
		. entering a ModelTransformationContext for the first time triggers
			a) setting self._execmode to PAUSE
			b) notifying the user of the "programmed debugging pause"
			c) spawning of a new atompm instance loaded with the relevant
				transformation model
			d) entering a programmed debugging pause that can be broken by
				_stopDebugProgrammedBreak(), which is triggered by
				i.  disabling transformation debugging; OR
				ii. pressing "play", "step" or "stop"
		. before running a rule, it or its enclosing ExhaustContext's associated
			atompm node is highlighted '''
	def toggleDebugMode(self) :
		self._debugOn = not self._debugOn
		if self._debugOn :
			self._aswPrintReq(TC.DEBUGGING_ON)
		else :
			self._stopDebugProgrammedBreak()
			self._aswPrintReq(TC.DEBUGGING_OFF)

	'''
	# Perform flat reachability analysis.
	# This is what creates the simple reachability graph.
	# If the Petri nets are disjoint, there will be several PN "modules" that will be calculated in parallel.
	def PNFull(self,fname='',dot=False):
		#if not self.modules:
		self._handleChangelogs()
		self.modules = {}
		self.modStart = time()
		disjoint = self._M.decompose(mode=ig.WEAK)
		# let's compile a PN graph out of the generic himesis graph and create a dictionary with unique IDs
		barier = barrier(len(disjoint)+1)	# barrier: modules + synch thread
		for mod in disjoint:
			queue = Queue()
			uid = uuid.uuid4()
			key = str(uid)[:str(uid).index('-')]
			module = PnModule(self.toPN(mod),queue,barier,True)
			self.modules[key] = module
			module.start()
		barier.wait()
		print 'Time elapsed flat analysis %f'%(time() - self.modStart)
		self.modules[key].summary()
		self.modStart = time()
		# do the test of reachability here
		#self.reachabilityTestFlat()
		print 'Find place elapsed flat analysis %f'%(time() - self.modStart)
		for key,mod in self.modules.items():
			if dot:
				# can take too long to plot for large state spaces and/or hang the dot binary;
				# here we output the reachability graph as svg
				mod.graph(key=key,fname=fname)
			else:
				# here we output the reachability graph as xml
				mod.reachtoxml(fname,key)

	# perform modular analysis; use at your own risk: the toolbar does not enable this, experimental
	def analyzePN(self):
		# first let's break the model into submodels
		self.modStart = time()
		#if not self.modules:
		self.modules = {}
		disjoint = self._M.decompose(mode=ig.WEAK)
		barier = barrier(len(disjoint)+1)	# barrier: modules + synch thread
		for mod in disjoint:
			queue = Queue()
			module = PnModule(self.toPN(mod),queue,barier)
			self.modules[module.getKey()] = module
			module.start()
		barier.wait()	# wait till all threads stop doing the first phase
		M0 = []
		TFenabled = []
		TF = {}
		work = []
		tofire = []
		for key,mod in self.modules.items():
			TF[key] = mod.TFS
			M0.append('%s-%d'%(key,0))
		ind = 0
		self.sg = synchgraph(len(self.modules),M0)
		work.append(M0)
		res = {}
		while work:
			M = work.pop()
			for key,mod in self.modules.items():
				TFenabled.append(mod.getEnabledTFs())
			tofire = reduce(set.intersection,map(set,TFenabled))
			for key,mod in self.modules.items():
				mod.que.put(tofire)
			barier.wait()	# let's wait for the threads, they may still be building local graphs
			end = False
			for key,mod in self.modules.items():
				if not mod.result:
					end = True
				res[key] = mod.result	# got results; now produce new states for the synch graph and arcs
			if not end:
				work.append(M)
			else:
				#self.sg.graph()
				for key,mod in self.modules.items():
					mod.SC()
					mod.graph()
					mod.que.put(['@exit'])
				self.sg.markSCC(self.modules)
				self.sg.graph()
				print '---------------------------'
				print 'Time elapsed modular analysis %f'%(time() - self.modStart)
				for key,mod in self.modules.items():
					mod.summary()
				print '---------------------------'
				print 'Synch graph:'
				self.sg.summary()
				print '---------------------------'
				self.modStart = time()
				self.reachabilityTestModular()
				print 'Find place elapsed modular analysis %f'%(time() - self.modStart)
				return
			# main result
			fr = {}
			to = {}
			for key,value in res.items():
				for k,v in value.items():
					if not k in fr:
						fr[k] = []
						fr[k].append([])
						fr[k].append([])
					fr[k][0].append(v[0])
					fr[k][1].append(v[1])
			from_prod = []
			to_prod = []
			T = None
			for key,value in fr.items():
				T = key
				#res = list(list(itertools.product(*value[0]))[0])
				from_prod.append(list(list(itertools.product(*value[0]))))
				to_prod.append(list(list(itertools.product(*value[1]))))
				self.sg.addMarkingBatch(T,from_prod,to_prod)
			#ENABLE
			self.sg.graph(ind)
			ind += 1
			#
			#	self.sg.addMarking(from_prod[i],to_prod[i],T)
			res.clear()
			TFenabled = []
		#ENABLE
		#self.sg.graph()
		TM = {}	# dict, keyed by tf transition, of lists of lists of start and end states

	# compile into our PN representation
	def toPN(self,mod):
		oldtonew = {}
		g = ig.Graph(0,directed=True)
		for node in mod.vs:
			if not node['$type'].find('Place') == -1 or not node['$type'].find('Transition') == -1:
				g.add_vertices(1)
				index = g.vcount()-1
				oldtonew[node.index] = index
				g.vs[index]['name'] = node['name']
				if not node['$type'].find('Place') == -1:
					g.vs[index]['type'] = 'P'
					g.vs[index]['nbTokens'] = node['nbTokens']
				elif not node['$type'].find('Transition') == -1:
					g.vs[index]['type'] = 'T'
					g.vs[index]['fusion'] = node['fusion']
			elif not node['$type'].find('P2T') == -1:
				node['type'] = 'P2T'
			elif not node['$type'].find('T2P') == -1:
				node['type'] = 'T2P'
		# let's connect
		P2T = mod.vs.select(type_eq = 'P2T')
		T2P = mod.vs.select(type_eq = 'T2P')
		for p2t in P2T:
			to = mod.successors(p2t.index)
			fr = mod.predecessors(p2t.index)
			try:
				p2tid = g.get_eid(oldtonew[fr[0]],oldtonew[to[0]])
			except:
				g.add_edges([(oldtonew[fr[0]],oldtonew[to[0]])])
				p2tid = g.get_eid(oldtonew[fr[0]],oldtonew[to[0]])
				g.es[p2tid]['weight'] = p2t['weight']
			else:
				old = int(g.es[p2tid]['weight'])
				g.es[p2tid]['weight'] = old + int(p2t['weight'])
		for t2p in T2P:
			to = mod.successors(t2p.index)
			fr = mod.predecessors(t2p.index)
			try:
				t2pid = g.get_eid(oldtonew[fr[0]],oldtonew[to[0]])
			except:
				g.add_edges([(oldtonew[fr[0]],oldtonew[to[0]])])
				t2pid = g.get_eid(oldtonew[fr[0]],oldtonew[to[0]])
				g.es[t2pid]['weight'] = t2p['weight']
			else:
				old = int(g.es[t2pid]['weight'])
				g.es[t2pid]['weight'] = old + int(t2p['weight'])
		# dot graph of our Petri net, not quite himesis
		#self.graphPN('pn', g)
		return g

	def isReachableFlat(self,state,key=None):
		if not key:
			if self.modules.values()[0].reachable(state):
				return True
			else:
				return False
		else:
			if self.modules[key].reachable(state):
				return True
			else:
				return False

	def reachabilityTestModular(self):
		aa = 'a'
		bb = 'b'
		moda = {}
		modb = {}
		moda['a2'] = 1
		#moda['a4'] = 4
		#moda['a3'] = 1
		modb['b1'] = 1
		modb['b3'] = 1
		statea = []
		stateb = []
		statea.append(moda)
		stateb.append(modb)
		if not self.isReachableFlat(statea,aa):
			print 'Modular state %s%s not reachable'%(statea,stateb)
			return False
		if not self.isReachableFlat(stateb,bb):
			print 'Modular state %s%s not reachable'%(statea,stateb)
			return False
		scca = self.modules[aa].reachableMod(statea)
		print 'A SCC of ancestors %s'%scca
		sccb = self.modules[bb].reachableMod(stateb)
		print 'B SCC of ancestors %s'%sccb
		result = list(list(itertools.product(scca,sccb)))
		for node in result:
			v = []
			a = 'a-%d'%node[0]
			b = 'b-%d'%node[1]
			v.append(a)
			v.append(b)
			id = self.sg.statePresentReach(v)
			if not id == -1:
				print 'Modular state %s%s reachable'%(statea,stateb)
				return True
		print 'Modular state %s%s not reachable'%(statea,stateb)
		return False

	def reachabilityTestFlat(self):
		moda = {}
		modb = {}
		moda['a1'] = 1
		moda['a4'] = 2
		#moda['a3'] = 1
		#modb['b3'] = 2
		modb['b5'] = 3
		state = []
		state.append(moda)
		state.append(modb)
		if self.isReachableFlat(state):
			print 'Flat state %s reachable'%state
		else:
			print 'Flat state %s not reachable'%state

	def graph(self,key,g):
		vattr = ''
		eattr = ''
		nodes = {}
		graph = pydot.Dot(key, graph_type='digraph')
		dateTag = datetime.datetime.now().strftime("%Y-%b-%d_%H-%M-%S")
		for v in g.vs:
			vattr += '('
			i = len(v['M'])
			for key,value in v['M'].items():
				vattr += '%s-%s'%(key,value)
				if not i-1 == 0:
					vattr += ','
				i -= 1
			vattr += ')'
			nodes[v.index] = pydot.Node(vattr)
			graph.add_node(nodes[v.index])
			vattr = ''
		for e in g.es:
			graph.add_edge(pydot.Edge(nodes[e.source],nodes[e.target],label=e['T']))
		graph.write_svg('graphs/STATE%s%s.svg'%(key,dateTag))
		#graph.write_png('graphs/STATE%s%s.png'%(key,dateTag))

	# use this one to output your PN net as an svg graph, to analyze the structure
	# and verify that the compilation from himesis to PN went fine, since we collapse
	# repeated edges
	def graphPN(self,key,g):
		vattr = ''
		eattr = ''
		nodes = {}
		graph = pydot.Dot(key, graph_type='digraph')
		dateTag = datetime.datetime.now().strftime("%Y-%b-%d_%H-%M-%S")
		for v in g.vs:
			for at in v.attributes():
				if not v[at] == None:
					vattr += '%s->%s\n'%(at,v[at])
			nodes[v.index] = pydot.Node(vattr)
			graph.add_node(nodes[v.index])
			vattr = ''
		for e in g.es:
			graph.add_edge(pydot.Edge(nodes[e.source],nodes[e.target],label=e['weight']))
		graph.write_svg('graphs/PN%s%s.svg'%(key,dateTag))
	'''