# ptcal.py

'''*****************************************************************************
AToMPM - A Tool for Multi-Paradigm Modelling
Copyright (c) 2011 Raphael Mannadiar (raphael.mannadiar@mail.mcgill.ca)

This file is part of AToMPM.

AToMPM is free software: you can redistribute it and/or modify it under the
terms of the GNU Lesser General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.

AToMPM is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along
with AToMPM. If not, see <http://www.gnu.org/licenses/>.
*****************************************************************************'''
import re, json, uuid, cPickle as pickle, threading, itertools, traceback, logging
from random import Random
from utils import Utilities as utils
from tcontext import ModelTransformationContext, ExhaustContext
from tconstants import TConstants as TC
from compiler import ModelAndRuleCompiler
from pytcore.core.himesis import HConstants as HC
from pytcore.rules.ndarule import NDARule
from pytcore.tcore.messages import Packet
from accurate_time import time as clock
from accurate_time import set_start_time
set_start_time()
import cProfile, pstats, StringIO

''' hergin :: motif-integration start '''
from motifcontext import MotifContext
from tcorecontext import TCoreContext
from pytcore.tcore.messages import Pivots
''' hergin :: motif-integration end '''

import igraph as ig
#import pydot
import datetime
from random import *
from threading import *
from Queue import *
from barrier import *
from synchgraph import *
from itertools import *
#from petrinet import *
from _abcoll import Iterable
from pprint import isreadable
from math import *
import os
'''
    py-t-core abstraction layer

    _lock                     used to synchronize access to self._changelogs
    _changelogs               used to hold asworker changelogs
    _M                        the himesis graph we're transforming
    _mtContexts               the current transformation contexts
    _transfData               a mapping between transformation filenames and
                              their data
    _userTransfs              stores the transformations loaded by the user
                              (needed to return to the pre-run state after
                              running)
    _execmode                 the current execution mode
    _debugOn                  indicates whether or not debugging is enabled
    _debugProgrammedBreak     a blocking flag used to freeze the execution of a
                              transformation until the user explicitly resumes,
                              at which point we continue precisely where we left
                              off
    _mtContexts2debugClients  used to map ModelTransformationContexts to their
                              associated atompm debugging window, if any
    _aswCommTools             bundle of properties and functions that enable and
                              facilitate sending requests to our parent
                              mtworker's asworker
    _aswNextSequenceNumber    used to determine if a changelog is received out
                              of order, and if a pending changelog is now ready
                              to be handled
    username                  the user's username
    defaultDCL                the user's preferred designer code language '''
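''' NOTE (illustrative sketch, not part of the original file): the exact shape of
    the 'aswCommTools' bundle passed to the constructor is inferred from how it is
    used below (an 'httpReq' callable and a 'wid' string); a typical setup by the
    enclosing mtworker might look roughly like

        aswCommTools = {'httpReq': <function forwarding (method,uri,data) to the parent asworker>,
                        'wid':     <that asworker's worker id>}
        ptcal = PyTCoreAbstractionLayer(aswCommTools, mtwid)
        ptcal.loadModel(m, mms, sn)                           # once, at setup time
        ptcal.loadTransforms(['/Transformations/T.model'])    # hypothetical filename
        ptcal.play()                                                                '''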
class PyTCoreAbstractionLayer :
    def __init__(self,aswCommTools,mtwid) :
        self._lock = threading.Condition()
        self._changelogs = []
        self._M = None
        ''' hergin :: motif-integration start '''
        self.packet = None
        self.globalDeltas = []
        self.incUpdates = True
        self.sendDeltas = True
        ''' hergin :: motif-integration end '''
        self._mtContexts = []
        self._transfData = {}
        self._userTransfs = []
        self._execmode = 'STOPPED'
        self._debugOn = False
        self._debugProgrammedBreak = threading.Event()
        self._debugProgrammedBreak.set()
        self._mtContexts2debugClients = {}
        self._aswCommTools = aswCommTools
        self._aswNextSequenceNumber = None
        self.username = None
        self.defaultDCL = TC.PYTHON
        self._mtwid = mtwid

        ''' used only in COMP 522 and COMP 621:
            Petri Net modules, decomposition of the disconnected _M graph '''
        self.modules = {}

        ''' synchronization graph for modular state spaces '''
        self.SG = None
        self.modStart = None
        self.modEnd = None
        self.flatStart = None
        self.flatEnd = None

        ''' state spaces for individual modules '''
        self.modIDtoPOS = {}
        self.SSs = {}

        # Enactment vars
        self.bdapiQueue = Queue()
        # Enactment: if this is True, OpenModel appends the model to the canvas; otherwise it reloads it.
        self.loadedModel = False
        # Enactment: set of formalisms we load automatically, used to do the cleanups.
        self._loadedMMs = set()
    def processQuery(self,query):
        result = ""
        query = query['query'].replace('"',"'")
        qValue = query.split('(')[0].strip()
        if qValue == "getCount":
            typ = query.split("'")[1].strip()
            def f(e):
                return e['$ft__'].endswith(typ)
            result = "Number of '" + typ + "' in the resulting graph: " + str(len(filter(f,self.packet.graph.vs)))
            # I tried to use self.packet.graph.vs.select(tokens_eq=3), but attribute names starting with $ didn't allow it
        elif qValue == "toggleSendDelta":
            self.sendDeltas = not self.sendDeltas
            if self.sendDeltas:
                result = "Deltas will be sent at the end!"
            else:
                result = "Deltas won't be sent at the end!"
        elif qValue == "toggleIncUpdate":
            self.incUpdates = not self.incUpdates
            if self.incUpdates:
                result = "Incremental updates are on!"
            else:
                result = "Incremental updates are off!"
        self._aswPrintReq(result)
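    ''' NOTE (illustrative sketch, not part of the original file): processQuery()
        expects a dict with a 'query' string, e.g.

            self.processQuery({'query': 'getCount("Place")'})

        which reports how many nodes whose full type ends in "Place" are in the
        current packet's graph ("Place" is just a hypothetical type name);
        toggleSendDelta and toggleIncUpdate take no argument. '''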
    ''' use this to queue a BDAPI response received from the client '''
    def _queueBDAPI(self,resp):
        self.bdapiQueue.put(resp)

    ''' send a PUT /GET/console request to the asworker '''
    def _aswPrintReq(self,msg) :
        return self._aswCommTools['httpReq']('PUT','/GET/console',{'text':msg})

    '''
        do 'callback()' once we've received the feedback for the last step that
        ran, if any, checking for the said feedback and handling any newly
        received feedback every TC.WAIT_ON_CHLOG_DELAY seconds '''
    def _doWhenLastStepFeedbackReceived(self,callback) :
        if self.incUpdates:
            def condition() :
                self._handleChangelogs()
                return self._mtContexts[-1].isLastStepFeedbackReceived()
            utils.doWhen(
                condition,
                TC.WAIT_ON_CHLOG_DELAY,
                callback)
        else:
            callback()
    '''
        synchronously verify if any changelogs have been received, and if we are
        ready to handle them, remove them from self._changelogs and handle them

        1. sort changelogs by ascending sequence#
        2. do nothing if self._changelogs is empty, if no model has been loaded
           yet (i.e., self._aswNextSequenceNumber == None), or if the oldest
           changelog is still too new to be handled
        3. crash if we encounter an invalid sequence#
        4. otherwise (we're ready to handle the oldest changelog), handle the
           oldest changelog, increment self._aswNextSequenceNumber and recurse
           back to step 1

        TBI: performance would benefit greatly from caches that map atompm ids to
             GUIDs '''
    def _handleChangelogs(self) :
        '''
            handle a single changelog '''
        def _handleChangelog(changelog) :
            def eq(a,b) : return str(a) == str(b)
            for c in changelog :
                if c['op'] == 'MKEDGE' :
                    node1 = \
                        self._M.vs.select(lambda v : eq(v['$atompmId'],c['id1']))[0]
                    node2 = \
                        self._M.vs.select(lambda v : eq(v['$atompmId'],c['id2']))[0]
                    self._M.add_edges((node1.index, node2.index))
                elif c['op'] == 'RMEDGE' :
                    pass
                elif c['op'] == 'MKNODE' :
                    self._compiler.addNode(self._M, json.loads(c['node']), c['id'])
                elif c['op'] == 'RMNODE' :
                    node = \
                        self._M.vs.select(lambda v : eq(v['$atompmId'],c['id']))[0]
                    self._M.delete_nodes([node.index])
                elif c['op'] == 'CHATTR' :
                    node = \
                        self._M.vs.select(lambda v : eq(v['$atompmId'],c['id']))[0]
                    self._M.vs[node.index][c['attr']] = c['new_val']
                elif c['op'] == 'LOADMM' :
                    self._compiler.parseMetamodel(
                        c['name'],
                        utils.fread(
                            '/users/%s/%s.metamodel'%(self.username,c['name'])),
                        loadMM=True)
                elif c['op'] == 'DUMPMM' :
                    self._compiler.unloadMetamodel(c['name'])
                elif c['op'] == 'RESETM' :
                    self._M = self._compiler.compileModel(c['new_model'])
                    self._M.mmTypeData = self._compiler.getMMTypeData()
                elif c['op'] == 'SYSOUT' :
                    ''' hergin :: motif-integration :: modify :: added startswith checks '''
                    if c['text'].startswith(TC.RULE_SUCCESS_MSG) or \
                       c['text'].startswith(TC.RULE_NOT_APPLICABLE_MSG) or \
                       c['text'].startswith(TC.RULE_FAILURE_MSG) or \
                       c['text'].startswith(TC.TRANSFORMATION_DONE) or \
                       c['text'].startswith(TC.REMOTE_APPLICATION_FAILURE) :
                        self._mtContexts[-1].setLastStepFeedbackReceived()

        self._lock.acquire()
        self._changelogs.sort(key=lambda c : utils.sn2int(c['sequence#']))
        if len(self._changelogs) == 0 or \
           self._aswNextSequenceNumber == None or \
           utils.sn2int(self._changelogs[0]['sequence#']) > \
           utils.sn2int(self._aswNextSequenceNumber) :
            self._lock.release()
        else :
            sn = self._changelogs[0]['sequence#']
            if utils.sn2int(sn) < utils.sn2int(self._aswNextSequenceNumber) :
                raise ValueError('invalid changelog sequence# :: '+sn)
            else :
                logging.debug('++ ('+sn+') '+str(self._changelogs[0]['changelog']))
                _handleChangelog(self._changelogs.pop(0)['changelog'])
                self._aswNextSequenceNumber = \
                    utils.incrementSequenceNumber(self._aswNextSequenceNumber)
            self._lock.release()
            self._handleChangelogs()
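    ''' NOTE (illustrative sketch, not part of the original file): each entry of
        self._changelogs is expected to look roughly like

            {'sequence#': '...',
             'changelog': [{'op': 'CHATTR', 'id': '...', 'attr': '...', 'new_val': '...'},
                           {'op': 'MKNODE', 'id': '...', 'node': '{... json ...}'},
                           ...]}

        i.e., a sequence number plus the list of operations that _handleChangelog()
        above switches on; the field values shown here are placeholders. '''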
    '''
        load a model (and its metamodels)

        1. compile the provided model into a himesis graph and save it to self._M
        2. synchronize self._M's mmTypeData with that of self._compiler's so that
           it gets updated as new metamodels are loaded
        3. initialize self._aswNextSequenceNumber based on the sequence# 'sn' of
           the provided model, and forget any already received out-of-date
           changelogs

        NOTE: this function should only get called once (when the asworker
              initially sets up this mtworker) '''
    def loadModel(self,m,mms,sn) :
        assert self._M == None, 'ptcal.loadModel() should only be called once'
        self._compiler = ModelAndRuleCompiler(
            self.username,
            self._aswCommTools['wid'],
            self.defaultDCL,
            self._mtwid)
        self._M = self._compiler.compileModel(m,mmsData=mms)
        self._M.mmTypeData = self._compiler.getMMTypeData()
        ''' hergin :: motif-integration start '''
        self.packet = Packet(self._M)
        ''' hergin :: motif-integration end '''
        self._aswNextSequenceNumber = utils.incrementSequenceNumber(sn)
        self._lock.acquire()
        self._changelogs = \
            filter(lambda c : utils.sn2int(sn) < utils.sn2int(c['sequence#']),
                   self._changelogs)
        self._lock.release()
    '''
        load a PN model (and its metamodels)

        1. compile the provided model into a himesis graph and save it to self._M
        2. synchronize self._M's mmTypeData with that of self._compiler's so that
           it gets updated as new metamodels are loaded
        3. initialize self._aswNextSequenceNumber based on the sequence# 'sn' of
           the provided model, and forget any already received out-of-date
           changelogs

        NOTE: this function should only get called once (when the asworker
              initially sets up this mtworker) '''
    def loadModelPN(self,m,mms,sn) :
        assert self._M == None, 'ptcal.loadModelPN() should only be called once'
        self._compiler = ModelAndRuleCompiler(self.username)
        self._M = self._compiler.compileModelPN(m,mmsData=mms)
        disjoint = self._M.decompose(mode=ig.WEAK)
        # create dictionary of modules
        for mod in disjoint:
            self.modules[uuid.uuid4()] = mod
        self._M.mmTypeData = self._compiler.getMMTypeData()
        self._aswNextSequenceNumber = utils.incrementSequenceNumber(sn)
        self._lock.acquire()
        self._changelogs = \
            filter(lambda c : utils.sn2int(sn) < utils.sn2int(c['sequence#']),
                   self._changelogs)
        self._lock.release()
    ''' setup internal state to reflect the given runtime configuration '''
    def _loadRuntimeConfiguration(self,rc) :
        if 'looseSubtypingMM' in rc :
            self._compiler.RC__looseSubtypingMM = rc['looseSubtypingMM']

    '''
        read in some json that describes a model transformation from file 'fname',
        store it in self._transfData, and push a new transformation context onto
        self._mtContexts '''
    def _loadTransform(self,fname) :
        if fname not in self._transfData :
            self._transfData[fname] = \
                utils.fread('/users/%s/%s'%(self.username,fname))
        ''' hergin :: motif-integration start '''
        if TC.MOTIFMM in self._transfData[fname]['metamodels']:
            self._mtContexts.append(MotifContext(fname,self))
        elif TC.TCOREMM in self._transfData[fname]['metamodels']:
            self._mtContexts.append(TCoreContext(fname,self))
        elif TC.TRANSFMM in self._transfData[fname]['metamodels']:
            self._mtContexts.append(ModelTransformationContext(self._transfData[fname],fname))
        ''' hergin :: motif-integration end '''

    '''
        load a set of user-specified transformations and forget anything
        previously loaded

        1. forget previously loaded transformations and compiled rules
        2. reset 'session' area
        3. load transformations
        4. remember loaded transformations '''
    def loadTransforms(self,fnames) :
        self._transfData = {}
        self._mtContexts = []
        self._compiler.forgetCompiledRules()
        self._M.session = {}
        for fname in fnames :
            self._loadTransform(fname)
        self._userTransfs = fnames
    '''
        returns the filename and compiled form of the next rule to run

        1. fetch the current transformation context
        2. retrieve and load runtime configuration options, if any
        3. if transformation debugging is enabled and we're entering a
           ModelTransformationContext for the first time, we
            a) remember self._execmode and set self._execmode to PAUSE
            b) notify the user of the impending "programmed debugging pause"
            c) request a new atompm instance loaded with the transformation model
               corresponding to the current transformation context
            d) unset the _debugProgrammedBreak flag and make a blocking call that
               waits for another thread to reset it (via
               self._stopDebugProgrammedBreak())... this may occur in a number of
               cases; depending on which one applies when it does,
                 i.   user presses stop --> self._execmode == 'STOPPING' :
                      _nextRule() returns an error which in turn triggers the
                      stopping of the current transformation
                 ii.  user disables debugging --> self._execmode == 'PAUSE' :
                      self._execmode is restored to its previous value and execution
                      continues as planned (i.e., _nextRule() returns with the next
                      rule to run)
                 iii. user presses play/step --> self._execmode == 'PLAY/STEP' :
                      same behaviour as ii.
        4. call its nextStep() function to get the next transformation step
            a) if the step is not an object (i.e., it is an application code)
                 i.  if there is more than 1 loaded transformation context,
                     j.   pop the newly completed transformation context
                     jj.  if the next context (which may be a "parent" transformation
                          or the next of multiple loaded transformations) is a parent,
                          update its last step application info
                     jjj. make a recursive call to get the next rule to run from
                          within the "next" context
                 ii. otherwise, simply return the said application code
            b) if the step is an error, return it
            c) otherwise, determine the type of the step (via the node's $type
               attribute or by inspecting the 'metamodels' array in the provided
               .model file)
                 i.  Rules get stored in 'nr' to be later returned
                 ii. Transformations and Exhausts[Randoms] cause the pushing of a
                     new transformation context onto self._mtContexts, and of a
                     recursive call to get the next rule to run from within that
                     new context
        5. return the rule name and compiled form

        NOTE: in step 4c), while in debug mode, we highlight transformations,
              exhausts and rules before recursing on them or returning them,
              respectively '''
    def _nextRule(self) :
        mtc = self._mtContexts[-1]
        self._loadRuntimeConfiguration(mtc.getRuntimeConfiguration())

        if self._debugOn and not mtc.isTransformationUnderWay() and \
           (type(mtc) == MotifContext or type(mtc) == TCoreContext) : # hergin :: motif-integration modify
            _execmode = self._execmode
            self._execmode = 'PAUSE'
            self._aswPrintReq(TC.DEBUGGING_HALT)
            self._requestClientDebugWindow(mtc.fname)
            self._startDebugProgrammedBreak()
            if self._execmode == 'STOPPING' :
                return {'$err':'transformation stopped during debugging pause'}
            elif self._execmode == 'PAUSE' or \
                 self._execmode == 'PLAY' or \
                 self._execmode == 'STEP' :
                self._execmode = _execmode

        self.bdapiQueue = Queue()
        ns = mtc.nextStep()
        if ns.__class__ != {}.__class__ :
            if len(self._mtContexts) > 1 :
                self._mtContexts = self._mtContexts[:-1]
                if self._mtContexts[-1].isTransformationUnderWay() :
                    self._mtContexts[-1].setLastStepApplicationInfo(ns)
                return self._nextRule()
            else :
                return ns
        elif '$err' in ns :
            return ns['$err']
        else :
            def highlightUpcomingStep() :
                for _mtc in reversed(self._mtContexts) :
                    if id(_mtc) in self._mtContexts2debugClients :
                        debugClient = self._mtContexts2debugClients[id(_mtc)]
                        self._requestNodeHighlight(
                            debugClient['host'],
                            debugClient['aswid'],
                            _mtc.getCurrentStepId())
                        break

            if 'id' in ns :
                fulltype = mtc.t['nodes'][ns['id']]['$type']
                ''' hergin :: motif-integration start '''
                if fulltype == mtc.metamodel+"/CRule":
                    if self._debugOn :
                        highlightUpcomingStep()
                    self._loadTransform(ns['rule'])
                    return self._nextRule()
                elif fulltype.startswith(TC.TCOREMM) or \
                     fulltype.startswith(TC.MOTIFMM):
                    if self._debugOn :
                        highlightUpcomingStep()
                    return ns
                    ''' hergin :: motif-integration end '''
                elif fulltype == TC.TRANSFMM+'/Rule' :
                    if self._debugOn :
                        highlightUpcomingStep()
                    return {'fname':ns['fname'],
                            'cr':self._compiler.compileRule(None,ns['fname'])}
                # Enactment OpenModel blob: pathToFormalism is present in the MM but not used here,
                # since the functionality of opening a window with formalisms is in WriteModel.
                # pathToFormalism should be removed from the MM for OpenModel (it was not removed due to
                # language evolution).
                elif fulltype == TC.TRANSFMM+'/OpenModel' :
                    if self._debugOn :
                        highlightUpcomingStep()
                    fname = mtc.t['nodes'][ns['id']]['pathToModel']['value']
                    #formalism = mtc.t['nodes'][ns['id']]['pathToFormalism']['value']
                    formalism = ""
                    return {'fname':fname,'formalism':formalism,'rtype':'OpenModel'}
                # Enactment WriteModel blob
                elif fulltype == TC.TRANSFMM+'/WriteModel' :
                    if self._debugOn :
                        highlightUpcomingStep()
                    fname = mtc.t['nodes'][ns['id']]['pathToModel']['value']
                    formalism = mtc.t['nodes'][ns['id']]['pathToFormalism']['value']
                    return {'fname':fname,'formalism':formalism,'rtype':'WriteModel'}
                elif fulltype == TC.TRANSFMM+'/Transformation' :
                    if self._debugOn :
                        highlightUpcomingStep()
                    self._loadTransform(ns['fname'])
                    return self._nextRule()
                elif fulltype == TC.TRANSFMM+'/Exhaust' :
                    self._mtContexts.append( ExhaustContext(mtc.t,ns['id']) )
                    return self._nextRule()
                elif fulltype == TC.TRANSFMM+'/ExhaustRandom' :
                    self._mtContexts.append( ExhaustContext(mtc.t,ns['id'],self._randomGen) )
                    return self._nextRule()
            else :
                ''' hergin :: motif-integration start '''
                if 'trafoResult' in ns:
                    return ns
                ''' hergin :: motif-integration end '''
                contents = utils.fread('/users/%s/%s'%(self.username,ns['fname']))
                if self._debugOn :
                    highlightUpcomingStep()
                if TC.RULEMM in contents['metamodels'] :
                    return {'fname':ns['fname'],
                            'cr':self._compiler.compileRule(contents,ns['fname'])}
                elif TC.TRANSFMM in contents['metamodels'] :
                    self._transfData[ns['fname']] = contents
                    self._loadTransform(ns['fname'])
                    return self._nextRule()
                raise ValueError(\
                    'file does not contain valid rule or transformation '+\
                    'model :: '+ns['fname'])
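    ''' NOTE (illustrative sketch, not part of the original file): _nextRule()'s
        "normal" return value is a dict naming a rule file and its compiled form,
        roughly

            {'fname': '/Rules/SomeRule.model',        # hypothetical rule filename
             'cr':    self._compiler.compileRule(None, '/Rules/SomeRule.model')}

        while enactment steps return {'fname':..., 'formalism':..., 'rtype':
        'OpenModel'|'WriteModel'}, MoTif/T-Core steps return the step dict itself,
        and anything else signals an error or a terminal application code. '''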
    ''' Enactment: do OpenModel magic '''
    def runOpenModelRule(self, fname="",formalism=""):
        unload = ""
        if not fname:
            return (None,TC.FAILED)
        else:
            if not formalism:
                self._aswPrintReq('auto loading model :: '+fname)
                try:
                    with open(os.getcwd()+'/users/'+self.username+fname) as f:
                        pass
                except IOError as e:
                    self._aswPrintReq('failed opening a file :: '+fname)
                    return (None,TC.FAILED)
                if not self.loadedModel:
                    method = '_loadModelForTransform'
                    if len(self._loadedMMs) == 0:
                        self._loadedMMs = self._compiler._loadedMMs.copy()
                    diff = self._compiler._loadedMMs.difference(self._loadedMMs)
                    for u in diff:
                        unload += u+'.defaultIcons.metamodel,'
                else:
                    method = '_appendModelForTransform'
                resp = self._aswCommTools['httpReq'](
                    'PUT',
                    '/GET/console',
                    {'text':'CLIENT_BDAPI :: '+
                            '{"func":"'+method+'",'+
                            ' "args":'+
                                '{"fname":"'+fname+'",'+
                                '"unload":"'+unload+'",'+
                                ' "callback-url":"/__mt/bdapiresp?wid='+
                                self._aswCommTools['wid']+'"}}'})
                resp = self.bdapiQueue.get(block=True,timeout=5000)
                if not resp['resp'] == 'ok':
                    return (None,TC.FAILED)
                else:
                    if not self.loadedModel:
                        self.loadedModel = True
                    return (None,TC.SUCCEEDED)
            # Was used to open a new window with loaded formalisms and pause the transform; now
            # this functionality is in WriteModel.
            else:
                pass
                # Keep for now....
                # self._aswPrintReq('pausing transform')
                # self._execmode = 'PAUSE'
                # self._aswPrintReq('opening new window for manual step:: '+fname)
                # try:
                #     with open(os.getcwd()+'/users/'+self.username+fname) as f:
                #         exists = 'true'
                # except IOError as e:
                #     exists = 'false'
                # resp = self._aswCommTools['httpReq'](
                #     'PUT',
                #     '/GET/console',
                #     {'text':'CLIENT_BDAPI :: '+
                #             '{"func":"_createEmptyModelInNewWindow",'+
                #             ' "args":'+
                #                 '{"fname":"'+fname+'","exists":"'+exists+'",'+'"formalism":"'+formalism+'",'
                #                 ' "callback-url":"/__mt/bdapiresp?wid='+
                #                 self._aswCommTools['wid']+'"}}'})
                # self.loadedModel = False
                # return (None,TC.SUCCEEDED)
    ''' Enactment: do WriteModel magic '''
    def runWriteModelRule(self,fname="",formalism=""):
        if not fname:
            # this makes the next OpenModel call load the model instead of appending it;
            # basically, use this trick to clean up the canvas and get a new model:
            # first place a WriteModel blob without an fname,
            # followed by an OpenModel.
            self.loadedModel = False
            return (None,TC.SUCCEEDED)
        else:
            # No formalism specified: save the model
            if not formalism:
                self._aswPrintReq('auto saving model :: '+fname)
                resp = self._aswCommTools['httpReq'](
                    'PUT',
                    '/GET/console',
                    {'text':'CLIENT_BDAPI :: '+
                            '{"func":"_writeModelAfterTransform",'+
                            ' "args":'+
                                '{"fname":"'+fname+'",'+
                                ' "callback-url":"/__mt/bdapiresp?wid='+
                                self._aswCommTools['wid']+'"}}'})
                # Need to wait for the model to load.
                resp = self.bdapiQueue.get(block=True,timeout=5000)
                if resp['resp'] == 'ok':
                    self.loadedModel = False
                    return (None,TC.SUCCEEDED)
                else:
                    return (None,TC.FAILED)
            # Formalism specified: open a new window, with the loaded formalism and/or model.
            else:
                self._aswPrintReq('pausing transform')
                self._execmode = 'PAUSE'
                self._aswPrintReq('opening new window for manual step:: '+fname)
                try:
                    with open(os.getcwd()+'/users/'+self.username+fname) as f:
                        # open existing model
                        exists = 'true'
                except IOError as e:
                    # or save a model with the fname provided
                    exists = 'false'
                resp = self._aswCommTools['httpReq'](
                    'PUT',
                    '/GET/console',
                    {'text':'CLIENT_BDAPI :: '+
                            '{"func":"_createEmptyModelInNewWindow",'+
                            ' "args":'+
                                '{"fname":"'+fname+'","exists":"'+exists+'",'+'"formalism":"'+formalism+'",'
                                ' "callback-url":"/__mt/bdapiresp?wid='+
                                self._aswCommTools['wid']+'"}}'})
                self.loadedModel = False
                return (None,TC.SUCCEEDED)
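    ''' NOTE (illustrative sketch, not part of the original file): the enactment
        helpers above drive the client through console messages of the form

            CLIENT_BDAPI :: {"func":"_writeModelAfterTransform",
                             "args":{"fname":"/some/model.model",
                                     "callback-url":"/__mt/bdapiresp?wid=<wid>"}}

        ("/some/model.model" is a placeholder); the client runs "func" and POSTs
        its response to the callback url, which lands in self.bdapiQueue via
        _queueBDAPI(), where runOpenModelRule()/runWriteModelRule() block on it. '''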
    '''
        synchronously save 1 changelog into self._changelogs '''
    def onchangelog(self,c) :
        self._lock.acquire()
        self._changelogs.append(c)
        self._lock.release()

    '''
        causes the execution of the current transformation(s) to pause (by
        preventing _play()'s next call to _step(), if any) '''
    def pause(self) :
        self._execmode = 'PAUSE'
        if not self.incUpdates:
            req = self.buildEditHttpReq(self.globalDeltas)
            self.globalDeltas = []
            resp = self._aswCommTools['httpReq']('POST','/batchEdit',req)
            if not utils.isHttpSuccessCode(resp['statusCode']) :
                self.stop()
                self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
                return
            self._handleChangelogs()

    '''
        play()
            calls _play() if it isn't already running (i.e., if we're already in PLAY
            mode, a timed call to _play() has already been placed) and if there isn't
            already another thread currently paused on _debugProgrammedBreak... if
            there is such a thread, it is unpaused (via _stopDebugProgrammedBreak())
            and the current thread terminates immediately

        _play()
            schedules an action for when feedback from the last step is received...
            the action is
                1. return if we're no longer in PLAY mode
                2. take one _step()
                3. schedule a recursive call to _play() in TC.INTER_RULE_DELAY
                   seconds '''
    def play(self) :
        self.start_time = clock()
        if self._execmode == 'STOPPED':
            self._randomGen = Random(0)
        if self._execmode != 'PLAY' :
            self._execmode = 'PLAY'
            if not self._stopDebugProgrammedBreak() :
                self._play()

    def _play(self) :
        if self.incUpdates:
            self._doWhenLastStepFeedbackReceived(
                lambda : self._execmode == 'PLAY' and \
                         self._step() and \
                         utils.setTimeout(TC.INTER_RULE_DELAY,self._play))
        else:
            self._doWhenLastStepFeedbackReceived(
                lambda : self._execmode == 'PLAY' and \
                         self._step() and \
                         self._play())
    '''
        associate the newly created debugging window described by 'clientInfo'
        to the ModelTransformationContext that requested its creation '''
    def registerDebugClient(self,clientInfo) :
        clientInfo = json.loads(clientInfo)
        for mtc in reversed(self._mtContexts) :
            if hasattr(mtc,'fname') and mtc.fname == clientInfo['fname'] :
                self._mtContexts2debugClients[id(mtc)] = clientInfo

    '''
        request a new atompm client via the client backdoor API... the new client
        will be loaded with the specified model, and sufficient information to
        identify and communicate with the client will be POSTed to the callback
        url '''
    def _requestClientDebugWindow(self,fname) :
        return self._aswCommTools['httpReq'](
            'PUT',
            '/GET/console',
            {'text':'CLIENT_BDAPI :: '+
                    '{"func":"_loadModelInNewWindow",'+
                    ' "args":'+
                        '{"fname":"'+fname+'",'+
                        ' "callback-url":"/__mt/debugClient?wid='+
                        self._aswCommTools['wid']+'"}}'})

    '''
        request that the specified node from the specified atompm instance be
        highlighted '''
    def _requestNodeHighlight(self,host,aswid,asid,timeout=5000) :
        return utils.httpReq(
            'PUT',
            host,
            '/GET/console?wid='+aswid,
            {'text':'CLIENT_BDAPI :: '+
                    '{"func":"_highlight",'+
                    ' "args":'+
                        '{"asid":"'+asid+'",'+
                        ' "timeout":'+str(timeout)+'}}'})
    ''' hergin :: motif-integration :: START :: moved this outside of the step function '''
    ''' also added self '''
    '''
        go through a rule's deltas and (1) produce a batchEdit request, and
        (2) undo them

        NOTE: we undo the deltas for 2 reasons
              1. so that changes become driven by asworker changelogs (like in
                 csworkers)
              2. so that we don't need to figure out which entries in received
                 changelogs correspond to user-operations and which ones
                 correspond to the effects of the constructed batchEdit

        NOTE: since we sometimes need to use the result from one request as the
              parameter of another (i.e., create a node, update *it*), we use
              the 'mknodes' map to remember which requests created which new
              nodes... this also lets us know which nodes already exist and
              which ones were created by the last rule

        NOTE: because the creation of connector nodes and their linking to their
              ends is described in non-contiguous deltas, we use the 'mknodes'
              map to remember incomplete connector creation requests until the
              appropriate MKEDGE deltas are encountered '''
    def buildEditHttpReq(self,deltas) :
        reqs = []
        mknodes = {}
        neighborhood = None

        '''
            construct an atompmId given a node... the result will be
            a) a 'macro' to be replaced by the result of an earlier request
               within the batchEdit, if the node was created by the last rule
            b) the atompmId stored within the node, if the node already has
               a counter-part in atompm '''
        def atompmInstanceId(node) :
            if node[HC.GUID] in mknodes :
                return '$'+str(mknodes[node[HC.GUID]])+'$'
            else :
                return node['$atompmId']

        for d in deltas :
            if d['op'] == 'RMNODE' :
                reqs.append({\
                    'method':'DELETE',
                    'uri':d['attrs'][HC.FULLTYPE]+'/'+\
                          d['attrs']['$atompmId']+'.instance'})
            elif d['op'] == 'MKNODE' :
                mknodes[d['guid']] = len(reqs)
                node = self._M.vs[self._M.get_node(d['guid'])]
                if neighborhood == None :
                    neighborhood = map(
                        lambda n: n[HC.FULLTYPE]+'/'+n['$atompmId']+'.instance',
                        d['neighborhood'])
                if node[HC.CONNECTOR_TYPE] :
                    reqs.append({\
                        'method':'POST',
                        'uri':node[HC.FULLTYPE]+'.type',
                        'reqData':
                            {'src':None,
                             'dest':None,
                             'hitchhiker':
                                 {'segments':None,
                                  'asSrc':None,
                                  'asDest':None,
                                  'neighborhood':neighborhood}}})
                else :
                    reqs.append({\
                        'method':'POST',
                        'uri':node[HC.FULLTYPE]+'.type',
                        'reqData':{'hitchhiker':{'neighborhood':neighborhood}}})
            elif d['op'] == 'RMEDGE' :
                pass
            elif d['op'] == 'MKEDGE' :
                def isConnectorMKNODE(req):
                    return 'dest' in req['reqData']
                if d['guid1'] in mknodes :
                    req = reqs[ mknodes[d['guid1']] ]
                    if isConnectorMKNODE(req) :
                        node2 = self._M.vs[self._M.get_node(d['guid2'])]
                        id = atompmInstanceId(node2)
                        req['reqData']['dest'] = \
                            req['reqData']['hitchhiker']['asDest'] = \
                            node2[HC.FULLTYPE]+'/'+id+'.instance'
                if d['guid2'] in mknodes :
                    req = reqs[ mknodes[d['guid2']] ]
                    if isConnectorMKNODE(req) :
                        node1 = self._M.vs[self._M.get_node(d['guid1'])]
                        id = atompmInstanceId(node1)
                        req['reqData']['src'] = \
                            req['reqData']['hitchhiker']['asSrc'] = \
                            node1[HC.FULLTYPE]+'/'+id+'.instance'
            elif d['op'] == 'CHATTR' :
                node = self._M.vs[self._M.get_node(d['guid'])]
                id = atompmInstanceId(node)
                reqs.append({\
                    'method':'PUT',
                    'uri':node[HC.FULLTYPE]+'/'+id+'.instance',
                    'reqData':{'changes':{d['attr']:d['new_val']}}})
            elif d['op'] == 'LOADMM' :
                reqs.append({\
                    'method':'PUT',
                    'uri':'/current.metamodels',
                    'reqData':
                        {'mm':'/%s%s.metamodel'%(self.username,d['name'])}})

        if self.incUpdates:
            for d in reversed(deltas) :
                if d['op'] == 'RMNODE' :
                    newNodeIndex = self._M.add_node(newNodeGuid=d['attrs'][HC.GUID])
                    for attr,val in d['attrs'].iteritems() :
                        self._M.vs[newNodeIndex][attr] = val
                elif d['op'] == 'MKNODE' :
                    node = self._M.vs[self._M.get_node(d['guid'])]
                    self._M.delete_nodes([node.index])
                elif d['op'] == 'RMEDGE' :
                    node1 = self._M.vs[self._M.get_node(d['guid1'])]
                    node2 = self._M.vs[self._M.get_node(d['guid2'])]
                    self._M.add_edges((node1.index, node2.index))
                elif d['op'] == 'MKEDGE' :
                    pass
                elif d['op'] == 'CHATTR' :
                    node = self._M.vs[self._M.get_node(d['guid'])]
                    node[d['attr']] = d['old_val']
                elif d['op'] == 'LOADMM' :
                    pass

        ''' hergin :: motif-integration modify: succeeded rule name + time '''
        #reqs.append({\
        #    'method':'PUT',
        #    'uri':'/GET/console',
        #    'reqData':{'text':TC.RULE_SUCCESS_MSG+" ("+self._mtContexts[-1]._lastStep['alias']+":"+self._mtContexts[-1]._lastStep['name']+") in "+str(self._mtContexts[-1]._lastStep['time'])}})
        #    'reqData':{'text':TC.RULE_SUCCESS_MSG}})
        return reqs
    ''' hergin :: motif-integration :: END :: moved this outside of the step function '''
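    ''' NOTE (illustrative sketch, not part of the original file): buildEditHttpReq()
        returns a list of request descriptors that are replayed in order by the
        asworker's /batchEdit endpoint, e.g. (with placeholder types and ids)

            [{'method':'POST',   'uri':'Formalisms/PN/PN/Place.type',
              'reqData':{'hitchhiker':{'neighborhood':[...]}}},
             {'method':'PUT',    'uri':'Formalisms/PN/PN/Place/$0$.instance',
              'reqData':{'changes':{'nbTokens':1}}},
             {'method':'DELETE', 'uri':'Formalisms/PN/PN/Transition/12.instance'}]

        where a '$N$' macro refers to the instance created by the N-th request of
        the same batch, as produced by atompmInstanceId() above. '''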
    '''
        author: hergin
        sendAndApplyDelta()
            if incremental updates are on:
                send and apply the given deltas to the model and the UI instance
            else:
                collect the deltas in globalDeltas, to be handled later '''
    def sendAndApplyDelta(self,deltas):
        if self.incUpdates:
            req = self.buildEditHttpReq(deltas)
            resp = self._aswCommTools['httpReq']('POST','/batchEdit',req)
            if not utils.isHttpSuccessCode(resp['statusCode']) :
                self.stop()
                self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
                return
            self._handleChangelogs()
        else:
            self.globalDeltas.extend(deltas)
        self.packet.deltas = []
    '''
        step()
            wrapper around _step() that ensures that step requests from the user are
            ignored when in PLAY mode, and that valid requests only go through (i.e.,
            actually call _step()) when feedback for the last step is received...
            moreover, as is the case for play(), if there is already another thread
            currently paused on _debugProgrammedBreak, it is unpaused (via
            _stopDebugProgrammedBreak()) and the current thread terminates immediately

        _step()
            fetch and run the next rule
            1. fetch next rule
                a) if the next rule is not a {} (i.e., all available transformations have
                   terminated and _nextRule() returned the resulting application code),
                   report the application code and stop()
                b) if an error is returned, report it and stop()
                c) otherwise,
                    i.   run rule (returns (deltas|error,applicationInfo))
                    ii.  set the ran rule's application info
                    iii. if the rule was n/a, report this
                    iv.  if the rule failed, report this and the error
                    v.   otherwise,
                         j.   construct a batchEdit operation based on the rule's effects,
                              and undo the said effects
                         jj.  send off the batchEdit
                         jjj. stop() if the batchEdit fails

        NOTE: this function assumes that feedback for the last step has already
              been received '''
    def step(self) :
        if self._execmode == 'PLAY' :
            pass
        else :
            if self._execmode == 'STOPPED':
                self._randomGen = Random(0)
            self._execmode = 'STEP'
            if not self._stopDebugProgrammedBreak() :
                self._doWhenLastStepFeedbackReceived(self._step)

    def _step(self) :
        '''
            run the specified rule and return a tuple describing its execution '''
        def runRule(r) :
            ''' hergin :: motif-integration start '''
            #self._aswPrintReq('launching rule :: '+r['fname'])
            #ar = NDARule(r['cr']['lhs'],r['cr']['rhs'],rng=self._randomGen)
            mtc = self._mtContexts[-1]
            if mtc.metamodel == TC.MOTIFMM or mtc.metamodel == TC.TCOREMM:
                ar = r['rule']
            else:
                ar = NDARule(r['cr']['lhs'],r['cr']['rhs'],rng=self._randomGen,sendAndApplyDeltaFunc=self.sendAndApplyDelta)
            if mtc.nextInput == "packetIn":
                startTime = clock()
                self.packet = ar.packet_in(self.packet)
                mtc.setLastStepExecTime(clock()-startTime)
            elif mtc.nextInput == "nextIn":
                startTime = clock()
                self.packet = ar.next_in(self.packet)
                mtc.setLastStepExecTime(clock()-startTime)
            elif mtc.nextInput == "cancelIn":
                startTime = clock()
                self.packet = ar.cancelIn(self.packet)
                mtc.setLastStepExecTime(clock()-startTime)
            elif mtc.nextInput == "successIn":
                startTime = clock()
                self.packet = ar.success_in(self.packet)
                mtc.setLastStepExecTime(clock()-startTime)
            ''' hergin :: motif-integration end '''
            if ar.is_success :
                return (self.packet.deltas,TC.SUCCEEDED)
            elif not ar.is_success :
                ''' hergin :: motif-integration start (some terminology fixed) '''
                if ar.exception :
                    return (str(ar.exception),TC.EXCEPTION)
                else :
                    return (None,TC.FAILED)
                ''' hergin :: motif-integration end '''

        try :
            nr = self._nextRule()
        except Exception :
            nr = {'$err':traceback.format_exc()}

        ''' hergin :: motif-integration start TRAFO RESULT: in case of a CRule_end, pop it from the context and continue with the rest '''
        while 'trafoResult' in nr:
            if len(self._mtContexts) == 1:
                if not self.incUpdates and self.sendDeltas:
                    ''' hergin TO BE MODIFIED - release mode will change '''
                    req = self.buildEditHttpReq(self.globalDeltas)
                    self.globalDeltas = []
                    resp = self._aswCommTools['httpReq']('POST','/batchEdit',req)
                    if not utils.isHttpSuccessCode(resp['statusCode']) :
                        self.stop()
                        self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
                        return
                    self._handleChangelogs()
                self._aswPrintReq(TC.TRANSFORMATION_DONE+nr['trafoResult']+" in "+str(self._mtContexts[-1].totalExecutionTime/1000.0)+" seconds, in total "+str((clock()-self.start_time)/1000.0))
                self.stop()
                return
            else:
                prevTrafo = self._mtContexts.pop()
                self._mtContexts[-1].setLastStepExecTime(prevTrafo.totalExecutionTime)
                self._mtContexts[-1].setLastStepApplicationInfo(nr['trafoResult'])
                try :
                    nr = self._nextRule()
                except Exception :
                    nr = {'$err':traceback.format_exc()}

        if nr.__class__ != {}.__class__ :
            self._aswPrintReq(TC.TRANSFORMATION_DONE + nr)
            self.stop()
            return
        elif '$err' in nr :
            self._aswPrintReq(TC.NO_NEXT_RULE+nr['$err'])
            self._stop()
            return
        else :
            if 'rtype' in nr:
                type = nr['rtype']
                if type == 'OpenModel':
                    (res,ai) = self.runOpenModelRule(nr['fname'],nr['formalism'])
                elif type == 'WriteModel':
                    (res,ai) = self.runWriteModelRule(nr['fname'],nr['formalism'])
                self._mtContexts[-1].setLastStepApplicationInfo(ai)
                self._mtContexts[-1].setLastStepFeedbackReceived()
                return True
            else:
                (res,ai) = runRule(nr)
                self._mtContexts[-1].setLastStepApplicationInfo(ai)
                if ai == TC.FAILED and self.incUpdates:
                    ''' hergin :: motif-integration modify (report which rule did not succeed) '''
                    self._aswPrintReq(TC.RULE_FAILURE_MSG+" ("+self._mtContexts[-1]._lastStep['alias']+":"+self._mtContexts[-1]._lastStep['name']+")")
                elif ai == TC.EXCEPTION and self.incUpdates:
                    self._aswPrintReq(TC.RULE_EXCEPTION_MSG + res)
                else :
                    ''' hergin :: motif-integration :: start '''
                    mtc = self._mtContexts[-1]
                    if self.incUpdates:
                        if mtc.metamodel == TC.MOTIFMM or mtc.metamodel == TC.TCOREMM:
                            self._aswPrintReq(TC.RULE_SUCCESS_MSG+" ("+self._mtContexts[-1]._lastStep['alias']+":"+self._mtContexts[-1]._lastStep['name']+")")
                        else:
                            self._aswPrintReq(TC.RULE_SUCCESS_MSG)
                    self._mtContexts[-1].setLastStepFeedbackReceived()
                    ''' hergin :: motif-integration :: end '''
                return True
    '''
        stop()
            sets self._execmode to STOPPING and schedules a call to _stop() for when
            feedback from the last step is received... being in STOPPING mode implies
            that the _play() loop, if any, will be broken, and that incoming user
            requests will be rejected... in the case where there is already another
            thread currently paused on _debugProgrammedBreak, it is unpaused (via
            _stopDebugProgrammedBreak()), which leads to the transformation being
            stopped from within that thread; the current thread terminates immediately

        _stop()
            1. restores self._mtContexts to right after the user loaded his
               transformation(s) (i.e., to before we actually [partially] ran it)
            2. resets self._mtContexts2debugClients
            3. sends a console message to notify the user that the transformation has
               stopped
            4. sets self._execmode to STOPPED (i.e., we're done STOPPING and can
               handle new requests) '''
    def isStopped(self) : return self._execmode == 'STOPPED'

    def isStopping(self) : return self._execmode == 'STOPPING'

    def stop(self) :
        if not self.incUpdates:
            req = self.buildEditHttpReq(self.globalDeltas)
            self.globalDeltas = []
            resp = self._aswCommTools['httpReq']('POST','/batchEdit',req)
            if not utils.isHttpSuccessCode(resp['statusCode']) :
                self.stop()
                self._aswPrintReq(TC.REMOTE_APPLICATION_FAILURE + resp['reason'])
                return
            self._handleChangelogs()
        self._execmode = 'STOPPING'
        # Used for enactment: prevents the next open from being an append.
        self.loadedModel = False
        if not self._stopDebugProgrammedBreak() :
            self._doWhenLastStepFeedbackReceived(self._stop)

    def _stop(self) :
        self._mtContexts = []
        for fname in self._userTransfs :
            self._loadTransform(fname)
        self._mtContexts2debugClients = {}
        self._aswPrintReq(TC.TRANSFORMATION_STOPPED)
        self._execmode = 'STOPPED'
    '''
        enter a "programmed debugging pause" (i.e., unset the _debugProgrammedBreak
        flag and block until another thread resets it) '''
    def _startDebugProgrammedBreak(self) :
        self._debugProgrammedBreak.clear()
        self._debugProgrammedBreak.wait()

    '''
        if the _debugProgrammedBreak flag is not set (this can only happen when
        another thread unset it to enter a programmed debugging pause), set it and
        return true, otherwise do nothing and return false

        NOTE: this function returns true iff there is a thread waiting on
              _debugProgrammedBreak '''
    def _stopDebugProgrammedBreak(self) :
        if not self._debugProgrammedBreak.isSet() :
            self._debugProgrammedBreak.set()
            return True
        return False

    '''
        toggle the _debugOn flag and report debugger status to atompm... when
        disabling debugging, any current programmed debugging pauses are stopped
        (and execution resumes normally)

        while in DEBUG MODE,
        . entering a ModelTransformationContext for the first time triggers
            a) setting self._execmode to PAUSE
            b) notifying the user of the "programmed debugging pause"
            c) spawning of a new atompm instance loaded with the relevant
               transformation model
            d) entering a programmed debugging pause that can be broken by
               _stopDebugProgrammedBreak(), which is triggered by
                 i.  disabling transformation debugging; OR
                 ii. pressing "play", "step" or "stop"
        . before running a rule, it or its enclosing ExhaustContext's associated
          atompm node is highlighted '''
    def toggleDebugMode(self) :
        self._debugOn = not self._debugOn
        if self._debugOn :
            self._aswPrintReq(TC.DEBUGGING_ON)
        else :
            self._stopDebugProgrammedBreak()
            self._aswPrintReq(TC.DEBUGGING_OFF)
    # NOTE: everything from the triple-quote below to the matching triple-quote at
    # the end of the file sits inside a string literal, i.e. it is never executed:
    # experimental Petri net (PN) reachability-analysis code (cf. the commented-out
    # 'petrinet' and 'pydot' imports above).
    '''
    # Perform flat reachability analysis.
    # This is what creates the simple reachability graph.
    # If the Petri nets are disjoint, there will be several PN "modules" that will be calculated in parallel.
    def PNFull(self,fname='',dot=False):
        #if not self.modules:
        self._handleChangelogs()
        self.modules = {}
        self.modStart = time()
        disjoint = self._M.decompose(mode=ig.WEAK)
        # Let's compile a PN graph out of the generic himesis graph and create a dictionary with unique IDs
        barier = barrier(len(disjoint)+1)  # barrier: modules + synch thread
        for mod in disjoint:
            queue = Queue()
            uid = uuid.uuid4()
            key = str(uid)[:str(uid).index('-')]
            module = PnModule(self.toPN(mod),queue,barier,True)
            self.modules[key] = module
            module.start()
        barier.wait()
        print 'Time elapsed flat analysis %f'%(time() - self.modStart)
        self.modules[key].summary()
        self.modStart = time()
        # do the test of reachability here.
        #self.reachabilityTestFlat()
        print 'Find place elapsed flat analysis %f'%(time() - self.modStart)
        for key,mod in self.modules.items():
            if dot:
                # can take too long to plot for a large state space and/or hang the dot binary;
                # here we output the reachability graph in svg
                mod.graph(key=key,fname=fname)
            else:
                # here we output the reachability graph in xml
                mod.reachtoxml(fname,key)
    # Perform modular analysis; use at your own risk. The toolbar does not enable this: experimental.
    def analyzePN(self):
        # First let's break the model into submodels
        self.modStart = time()
        #if not self.modules:
        self.modules = {}
        disjoint = self._M.decompose(mode=ig.WEAK)
        barier = barrier(len(disjoint)+1)  # barrier: modules + synch thread
        for mod in disjoint:
            queue = Queue()
            module = PnModule(self.toPN(mod),queue,barier)
            self.modules[module.getKey()] = module
            module.start()
        barier.wait()  # wait till all threads finish the first phase.
        M0 = []
        TFenabled = []
        TF = {}
        work = []
        tofire = []
        for key,mod in self.modules.items():
            TF[key] = mod.TFS
            M0.append('%s-%d'%(key,0))
        ind = 0
        self.sg = synchgraph(len(self.modules),M0)
        work.append(M0)
        res = {}
        while work:
            M = work.pop()
            for key,mod in self.modules.items():
                TFenabled.append(mod.getEnabledTFs())
            tofire = reduce(set.intersection,map(set,TFenabled))
            for key,mod in self.modules.items():
                mod.que.put(tofire)
            barier.wait()  # let's wait for the threads, they may still be building local graphs
            end = False
            for key,mod in self.modules.items():
                if not mod.result:
                    end = True
                res[key] = mod.result  # got results; now produce new states for the synch graph and arcs.
            if not end:
                work.append(M)
            else:
                #self.sg.graph()
                for key,mod in self.modules.items():
                    mod.SC()
                    mod.graph()
                    mod.que.put(['@exit'])
                self.sg.markSCC(self.modules)
                self.sg.graph()
                print '---------------------------'
                print 'Time elapsed modular analysis %f'%(time() - self.modStart)
                for key,mod in self.modules.items():
                    mod.summary()
                print '---------------------------'
                print 'Synch graph:'
                self.sg.summary()
                print '---------------------------'
                self.modStart = time()
                self.reachabilityTestModular()
                print 'Find place elapsed modular analysis %f'%(time() - self.modStart)
                return
            # main result
            fr = {}
            to = {}
            for key,value in res.items():
                for k,v in value.items():
                    if not k in fr:
                        fr[k] = []
                        fr[k].append([])
                        fr[k].append([])
                    fr[k][0].append(v[0])
                    fr[k][1].append(v[1])
            from_prod = []
            to_prod = []
            T = None
            for key,value in fr.items():
                T = key
                #res = list(list(itertools.product(*value[0]))[0])
                from_prod.append(list(list(itertools.product(*value[0]))))
                to_prod.append(list(list(itertools.product(*value[1]))))
                self.sg.addMarkingBatch(T,from_prod,to_prod)
            #ENABLE
            self.sg.graph(ind)
            ind += 1
            #
            # self.sg.addMarking(from_prod[i],to_prod[i],T)
            res.clear()
            TFenabled = []
        #ENABLE
        #self.sg.graph()
        TM = {}  # dict keyed by tf transition, holding lists of lists of start and end states
    # Compile into our PN representation.
    def toPN(self,mod):
        oldtonew = {}
        g = ig.Graph(0,directed=True)
        for node in mod.vs:
            if not node['$type'].find('Place') == -1 or not node['$type'].find('Transition') == -1:
                g.add_vertices(1)
                index = g.vcount()-1
                oldtonew[node.index] = index
                g.vs[index]['name'] = node['name']
                if not node['$type'].find('Place') == -1:
                    g.vs[index]['type'] = 'P'
                    g.vs[index]['nbTokens'] = node['nbTokens']
                elif not node['$type'].find('Transition') == -1:
                    g.vs[index]['type'] = 'T'
                    g.vs[index]['fusion'] = node['fusion']
            elif not node['$type'].find('P2T') == -1:
                node['type'] = 'P2T'
            elif not node['$type'].find('T2P') == -1:
                node['type'] = 'T2P'
        # Let's connect
        P2T = mod.vs.select(type_eq = 'P2T')
        T2P = mod.vs.select(type_eq = 'T2P')
        for p2t in P2T:
            to = mod.successors(p2t.index)
            fr = mod.predecessors(p2t.index)
            try:
                p2tid = g.get_eid(oldtonew[fr[0]],oldtonew[to[0]])
            except:
                g.add_edges([(oldtonew[fr[0]],oldtonew[to[0]])])
                p2tid = g.get_eid(oldtonew[fr[0]],oldtonew[to[0]])
                g.es[p2tid]['weight'] = p2t['weight']
            else:
                old = int(g.es[p2tid]['weight'])
                g.es[p2tid]['weight'] = old + int(p2t['weight'])
        for t2p in T2P:
            to = mod.successors(t2p.index)
            fr = mod.predecessors(t2p.index)
            try:
                t2pid = g.get_eid(oldtonew[fr[0]],oldtonew[to[0]])
            except:
                g.add_edges([(oldtonew[fr[0]],oldtonew[to[0]])])
                t2pid = g.get_eid(oldtonew[fr[0]],oldtonew[to[0]])
                g.es[t2pid]['weight'] = t2p['weight']
            else:
                old = int(g.es[t2pid]['weight'])
                g.es[t2pid]['weight'] = old + int(t2p['weight'])
        # dot graph of our petri net, not quite himesis.
        #self.graphPN('pn', g)
        return g
    def isReachableFlat(self,state,key=None):
        if not key:
            if self.modules.values()[0].reachable(state):
                return True
            else:
                return False
        else:
            if self.modules[key].reachable(state):
                return True
            else:
                return False
    def reachabilityTestModular(self):
        aa = 'a'
        bb = 'b'
        moda = {}
        modb = {}
        moda['a2'] = 1
        #moda['a4'] = 4
        #moda['a3'] = 1
        modb['b1'] = 1
        modb['b3'] = 1
        statea = []
        stateb = []
        statea.append(moda)
        stateb.append(modb)
        if not self.isReachableFlat(statea,aa):
            print 'Modular state %s%s not reachable'%(statea,stateb)
            return False
        if not self.isReachableFlat(stateb,bb):
            print 'Modular state %s%s not reachable'%(statea,stateb)
            return False
        scca = self.modules[aa].reachableMod(statea)
        print 'A SCC of ancestors %s'%scca
        sccb = self.modules[bb].reachableMod(stateb)
        print 'B SCC of ancestors %s'%sccb
        result = list(list(itertools.product(scca,sccb)))
        for node in result:
            v = []
            a = 'a-%d'%node[0]
            b = 'b-%d'%node[1]
            v.append(a)
            v.append(b)
            id = self.sg.statePresentReach(v)
            if not id == -1:
                print 'Modular state %s%s reachable'%(statea,stateb)
                return True
        print 'Modular state %s%s not reachable'%(statea,stateb)
        return False
    def reachabilityTestFlat(self):
        moda = {}
        modb = {}
        moda['a1'] = 1
        moda['a4'] = 2
        #moda['a3'] = 1
        #modb['b3'] = 2
        modb['b5'] = 3
        state = []
        state.append(moda)
        state.append(modb)
        if self.isReachableFlat(state):
            print 'Flat state %s reachable'%state
        else:
            print 'Flat state %s not reachable'%state
    def graph(self,key,g):
        vattr = ''
        eattr = ''
        nodes = {}
        graph = pydot.Dot(key, graph_type='digraph')
        dateTag = datetime.datetime.now().strftime("%Y-%b-%d_%H-%M-%S")
        for v in g.vs:
            vattr += '('
            i = len(v['M'])
            for key,value in v['M'].items():
                vattr += '%s-%s'%(key,value)
                if not i-1 == 0:
                    vattr += ','
                i -= 1
            vattr += ')'
            nodes[v.index] = pydot.Node(vattr)
            graph.add_node(nodes[v.index])
            vattr = ''
        for e in g.es:
            graph.add_edge(pydot.Edge(nodes[e.source],nodes[e.target],label=e['T']))
        graph.write_svg('graphs/STATE%s%s.svg'%(key,dateTag))
        #graph.write_png('graphs/STATE%s%s.png'%(key,dateTag))

    # Use this one to output your PN net as an svg graph to analyze its structure
    # and verify that the compilation from Himesis to PN went fine, since we collapse
    # repeated edges.
    def graphPN(self,key,g):
        vattr = ''
        eattr = ''
        nodes = {}
        graph = pydot.Dot(key, graph_type='digraph')
        dateTag = datetime.datetime.now().strftime("%Y-%b-%d_%H-%M-%S")
        for v in g.vs:
            for at in v.attributes():
                if not v[at] == None:
                    vattr += '%s->%s\n'%(at,v[at])
            nodes[v.index] = pydot.Node(vattr)
            graph.add_node(nodes[v.index])
            vattr = ''
        for e in g.es:
            graph.add_edge(pydot.Edge(nodes[e.source],nodes[e.target],label=e['weight']))
        graph.write_svg('graphs/PN%s%s.svg'%(key,dateTag))
    '''