% articleJens.tex

% [extraction artifact removed: a concatenated run of listing line numbers (1..1540) left over from text extraction]
  1. %\documentclass[10pt,a4,twocolumn]{article}
  2. \documentclass[10pt,letter,twocolumn]{article}
  3. %\input artSCSA4
  4. \input /home/hv/src/sty/artLetter
  5. \begin{document}
  6. \title{\textsf{The Cellular Automata Formalism\\
  7. and its Relationship to DEVS}}
  8. \author{Hans L.M. Vangheluwe$^{\dag,\ddag}$
  9. and Ghislain C. Vansteenkiste$^\ddag$\\ \\
  10. {\small
  11. \hfill
  12. \begin{minipage}{8cm}
  13. \begin{center}
  14. $^\dag$ School of Computer Science\\
  15. McGill University, Montr\'eal, Qu\'ebec, Canada\\
  16. e-mail: hv@cs.mcgill.ca
  17. \end{center}
  18. \end{minipage}
  19. \hfill
  20. \begin{minipage}{8cm}
  21. \begin{center}
  22. $^\ddag$ Department of Applied Mathematics,\\
  23. Biometrics, and Process Control (BIOMATH)\\
  24. Ghent University (RUG), Gent, Belgium
  25. \end{center}
  26. \end{minipage}
  27. \hfill
  28. }\\ \\
  29. Keywords: multi-formalism modelling, simulation,
  30. Cellular Automata, DEVS
  31. }
  32. \date{2000\thanks{{\underline{\textbf{Appeared as:}}}
  33. Hans~L. Vangheluwe and Ghislain~C. Vansteenkiste.
  34. The cellular automata formalism and its relationship to {DEVS}.
  35. In Rik Van~Landeghem, editor, {\em $14^{th}$ European Simulation
  36. Multi-conference (ESM)}, pages 800--810. Society for Computer
  37. Simulation International (SCS), May 2000. Gent, Belgium.}}
  38. \maketitle
  39. % \thispagestyle{empty}
  40. \section*{\centerline{Abstract}}
  41. %\parindent 0.cm
  42. %\parskip 0.1cm
  43. Cellular automata (CA) were originally conceived by Ulam and von
  44. Neumann in the 1940s to provide a formal framework for investigating
  45. the behaviour of complex, spatially distributed systems.
  46. Cellular Automata constitute a {\em dynamic, discrete space,
  47. discrete time} formalism where space is usually discretized in a
  48. grid of cells. Cellular automata are still used to study, from first
  49. principles, the behaviour of a plethora of systems.
  50. The Discrete EVent system Specification (DEVS) was first introduced by
  51. Zeigler in 1976 as a rigorous basis for discrete-event modelling.
  52. At the discrete-event level of abstraction, state variables are
  53. considered to change only at event-times. Furthermore, the number of
  54. events occurring in a bounded time-interval must be finite.
  55. The semantics of well known discrete-event formalisms such as
  56. Event Scheduling, Activity Scanning, and Process Interaction can
  57. be expressed in terms of DEVS, making it a common denominator for
  58. the representation of discrete-event models.
  59. Due to its roots in traditional sequential formalisms, DEVS offers
  60. little potential for parallel implementation. Furthermore,
  61. conflicts between simultaneous internal state transitions and
  62. external events are resolved by ignoring the internal transitions.
  63. In 1996, Chow introduced the parallel DEVS (P-DEVS) formalism which
  64. alleviates these drawbacks.
  65. Since the end of 1993, the European Commission's ESPRIT Basic
  66. Research Working Group 8467 (SiE), has identified key areas for
  67. future research in modelling and simulation. The most urgent need is
  68. to support multi-formalism modelling to correctly model complex
  69. systems with components described in diverse formalisms.
  70. To achieve this goal, formalisms need to be related.
  71. The Formalism Transformation Graph (FTG) shown in Figure~\ref{fig:ftl}
  72. \begin{figure*}
  73. \postscript{figs/ftl.bold.eps}{0.8}
  74. \caption{Formalism Transformations}
  75. \label{fig:ftl}
  76. \end{figure*}
  77. depicts behaviour-conserving transformations between formalisms.
  78. In this article, both the Cellular Automata and the DEVS and parallel
  79. DEVS formalisms are introduced. Then, a mapping between Cellular Automata
  80. and parallel DEVS is elaborated. This fills in the $CA \rightarrow DEVS$
  81. edge in the FTG. The mapping describes CA semantics in terms of parallel
  82. DEVS semantics. As such, it is a specification for automated transformation
  83. of CA models into parallel DEVS models, thus enabling efficient, parallel
  84. simulation as well as meaningful coupling with models in other
  85. formalisms.
  86. %\end{abstract}
  87. \section{The Cellular Automata Formalism} \label{sec:CA}
  88. Cellular automata (CA) were originally conceived by Ulam and von
  89. Neumann in the 1940s to provide a formal framework for investigating
  90. the behaviour of complex, spatially distributed systems
  91. \cite{vonNeumann:automata}.
  92. Cellular Automata constitute a {\em dynamic, discrete space,
  93. discrete time} formalism.
  94. Space in Cellular Automata is partitioned into discrete volume
  95. elements called {\em cells} and time progresses in discrete steps.
  96. Each cell can be in one of a finite number of states at any given time.
  97. The ``physics'' of this logical universe is {\em deterministic} and
  98. {\em local}.
  99. {\em Deterministic} means that once a local physics and an initial state of
  100. a Cellular Automaton have been chosen, its future evolution is
  101. {\em uniquely} determined. {\em Local} means that the state of a
  102. cell at time $t+1$ is determined only by its own state and the states of
  103. neighbouring cells at the previous time $t$.
  104. The {\em operational semantics} of a CA as prescribed in a
  105. {\em simulation procedure} and implemented in a CA solver dictates
  106. that values are updated {\em synchronously}: all new values are calculated
  107. simultaneously.
  108. The local physics is typically determined by an {\em explicit mapping}
  109. from all possible local states of a predefined neighbourhood template (\eg
  110. the cells bordering on a cell, including the cell itself), to the state
  111. of that cell after the next time-step.
  112. For example, for a 2-state ($\{0,1\}$), 1-D Cellular Automaton
  113. with a neighbourhood template that includes a cell and its
  114. immediate neighbours to the left and right, there will be
  115. $2^3$ possible neighbourhood states \{$000, \ldots, 111$\}.
  116. For each of these, we must prescribe whether
  117. a transition of the center cell state to a $1$ or to a $0$ will occur.
  118. For an 8-state nearest neighbour 2-D Cellular Automaton, there will be
  119. $8^5$ possible neighbourhood states, and a choice of 8 states to map to
  120. for each of those.
  121. The Cellular Automata formalism CA fits the
  122. general structure of deterministic, causal systems in classical
  123. systems theory \cite{Wymore:systems,Zeigler:theory_integr}:
  124. \[ CA \equiv \langle T, X, \Omega, S, \delta, Y, \lambda \rangle.\]
  125. The formalism is specified by elaboration of the elements of the
  126. $CA$ 7-tuple, as described below:
  127. \par
  128. The {\em discrete} time base $T$:
  129. \[
  130. T = \bb{N}\ \mbox{(or isomorphic with $\bb{N}$)}.
  131. \]
  132. The input set $X$:
  133. \[
  134. X = \{TIME\_TICK\}.
  135. \]
  136. The Cellular Automata formalism can
  137. easily be extended to non-trivial inputs (see further comments
  138. on extensions).
  139. The set of all input segments $\omega$:
  140. \[
  141. \Omega.
  142. \]
  143. An input segment $\omega$ may be restricted to a domain
  144. ($\subseteq T$) such as $]n,n+1]$:
  145. \begin{eqnarray*}
  146. \omega &: &T \rightarrow X,\\
  147. \omega_{]n,n+1]} &:& ]n,n+1] \rightarrow X.
  148. \end{eqnarray*}
  149. The state set $S$ is the product of all the {\em finite} state sets
  150. $V_i$ (also called {\em cell value sets}) of the individual cells.
  151. $C$ is the {\em cell index set}.
  152. \[
  153. S = \times_{i \in C} V_i.
  154. \]
  155. In the usual case of Cellular Automata realized on a $D$-dimensional
  156. grid, $C$ consists of $D$-tuples of indices from a coordinate set $I$:
  157. \[
  158. C = I^D.
  159. \]
  160. In the standard Cellular Automata formalism, the cell space is
  161. assumed {\em homogeneous}: all cell value sets are identical:
  162. \[
  163. \forall i \in C, V_i = V.
  164. \]
  165. The cell value function $v$ maps a cell index $i$ onto its value
  166. $v(i)$:
  167. \[
  168. v: C \rightarrow V.
  169. \]
  170. The total transition function $\delta$ is constructed
  171. from the transition functions $\delta_i$ for each of the cells.
  172. \[
  173. \begin{array}{lcccc}
  174. \delta & : & \Omega \times S & \rightarrow & S,\\
  175. & & (\omega_{]n,n+1]}, \times_{i \in C} v(i))
  176. & \rightarrow &
  177. \times_{i \in C} \delta_i(i).
  178. \end{array}
  179. \]
  180. The {\em uniformity} of Cellular Automata requires
  181. the $\delta_i$ to be based on a {\em single} local transition
  182. function $\delta_l$ for all cells $i$:
  183. \[
  184. \forall i \in C, \delta_i(i) = \delta_l(v(\sigma_{NT}(i))),
  185. \]
  186. where the various operators and quantities are explained below.
  187. A neighbourhood template $NT$, a vector of {\em finite}
  188. size $\xi$ containing {\em offsets} from an ``origin'' $0$,
  189. encodes the relative positions of neighbouring cells influencing
  190. the future state of a cell.
  191. Usually, the {\em nearest (adjacent)} neighbours (including the cell itself)
  192. are used.
  193. For one-dimensional Cellular Automata, a cell is connected to $r$ local
  194. neighbours (cells) on either side, where $r$ is a parameter referred to as
  195. the {\em radius} (thus, there are $2r+1$ cells in each neighbourhood).
  196. For two-dimensional Cellular Automata, two types of neighbourhoods
  197. are usually considered:
  198. \begin{itemize}
  199. \item The {\em von Neumann} neighbourhood of radius $r$
  200. \[ NT = \{(k,l) \in C |\ |k| + |l| \leq r\}. \]
  201. For $r=1$, this yields 5-cells, consisting of a cell along with
  202. its four immediate non-diagonal neighbours.
  203. \item The {\em Moore} neighbourhood of radius $r$
  204. \[ NT = \{(k,l) \in C |\ |k| \leq r \ \textsf{and}\ |l| \leq r\}. \]
  205. For $r=1$, this yields 9-cells, consisting of the cell
  206. along with its eight surrounding neighbours.
  207. \end{itemize}
  208. The {\em local} nature of the Cellular Automata models
  209. lies in the fact that {\em only} near neighbours influence the
  210. behaviour of a state, {\em not all} cells as the general form of
  211. $\delta$ allows. From a modelling point of view, physical arguments
  212. in disciplines ranging from physics to biology and artificial life
  213. support this assumption \cite{Wolfram:CA}. From a simulation point of view,
  214. efficient solvers for the Cellular Automata formalism can be constructed
  215. which take advantage of locality.
  216. Note how the $NT[j]$ are offsets between $C$ elements which are
  217. again elements of $C$ (with an appropriate $C$ such as $\bb{N}^D$).
  218. \[
  219. NT \in C^\xi.
  220. \]
  221. The function $\sigma_{NT}$ {\em shifts} the neighbourhood template
  222. $NT$ to be centered over $i$:
  223. \[
  224. \begin{array}{lcccl}
  225. \sigma_{NT} &:& C &\rightarrow& C^\xi,\\
  226. & & i &\rightarrow& \tau \\
  227. & & & &
  228. \mbox{where}\ \forall j \in \{1, \ldots, \xi\}:
  229. \tau[j] = i + NT[j].
  230. \end{array}
  231. \]
  232. For all possible combinations of size $\xi$ of cell values,
  233. the local transition function $\delta_l$ prescribes the transition
  234. to a new value:
  235. \[
  236. \begin{array}{lcccc}
  237. \delta_l &:& V^\xi & \rightarrow & V,\\
  238. & & [v_1, \ldots, v_\xi] & \rightarrow & v_{new}.
  239. \end{array}
  240. \]
  241. Thanks to the uniformity of Cellular Automata,
  242. the same $\delta_l$ is used for each element of the cell space.
  243. The number of possible combinations of cell values is
  244. $\#V^\xi$ and the number of distinct results of $\delta_l$ is $\#V$
  245. ($\#$ is the cardinality function).
  246. In the above, $\delta$ was constructed for an elementary time advance
  247. $n \rightarrow n+1$. The composition (or semigroup) requirement
  248. for deterministic system models:
  249. \[\forall t_x \in [t_i, t_f]; s_i \in S,\]
  250. \[\delta(\omega_{[t_i, t_f]}, s_i) =
  251. \delta(\omega_{[t_x, t_f]},
  252. \delta(\omega_{[t_i, t_x]}, s_i))
  253. \]
  254. is satisfied by {\em construction} (\ie the definition, combined with
  255. iteration over $n$).
  256. It is possible to ``observe'' the Cellular Automaton by projecting
  257. the total state $S$ onto an output set $Y$ by means of an
  258. output function $\lambda$
  259. \[\lambda: S \rightarrow Y,\]
  260. where $Y$ commonly has a structure similar to that of $S$.
  261. We shall now discuss the {\em structure} of the cell space.
  262. Usually, a $D$-dimensional {\em grid}, centered around the origin,
  263. is used. Most common choices are $D \in \{1,2,3\}$.
  264. In one dimension, a linear array of cells is the only possible geometry
  265. for the grid. In two dimensions, there are three choices:
  266. \begin{itemize}
  267. \item \textbf{triangular grid}: has a small number of nearest neighbours
  268. but is hard to visualize on square grid oriented computers.
  269. \item \textbf{square grid}: is easy to visualize, but (computationally)
  270. {\em anisotropic} (\ie a wave propagates faster along the primary
  271. axes than along the diagonals).
  272. \item \textbf{hexagonal grid}: has the lowest anisotropy of all grids but
  273. computer visualization is hard to implement.
  274. \end{itemize}
  275. Arbitrary cell space structures are possible (and corresponding cell
  276. shapes when visualizing), though not practical.
  277. Although the above mathematical formalism is perfectly valid, it can
  278. not be simulated {\em in practice}. For simulation to become possible, the
  279. cell space needs to be {\em finite}. In particular, the cell index set
  280. $C$ must be finite. $L$, the length of the grid becomes finite, leading
  281. to a cell space of $L^D$ cells.
  282. When a finite cell space is used, the application of the transition
  283. function at the edges poses a problem as values are needed outside the
  284. cell space. As shown in Figure~\ref{fig:boundary} for a 2-D cell space,
  285. \begin{figure}
  286. \postscript{figs/boundary.eps}{0.9}
  287. \caption{Boundary Conditions for Finite Cell Space}
  288. \label{fig:boundary}
  289. \end{figure}
  290. {\em boundary conditions} need to be specified in the form of
  291. cell values {\em outside} the cell space.
  292. Two common approaches --also used in the specification and
  293. solution of partial differential equations \cite{Farlow:PDE}-- are
  294. \begin{itemize}
  295. \item \textbf{explicit} boundary conditions. Extra cells
  296. outside the perimeter of the cell space hold boundary values
  297. (\ie $C$ and $v$ are extended).
  298. The number of extra border cells is determined by the
  299. size of (the perimeter of) the cell space as well as by the size
  300. (and shape) of
  301. the neighbourhood template.
  302. Boundary conditions may be time varying.
  303. \item \textbf{periodic} boundary conditions. The cell space is
  304. assumed to ``wrap around'': the cells at opposite ends
  305. of the grid act as each other's neighbours. In 1-D, this results
  306. in a (2-D) circle, in 2-D, in a (3-D) torus. By construction, the
  307. boundary conditions are time-varying.
  308. \end{itemize}
  309. \subsection{Implementation of a CA Solver}
  310. \subsubsection{The Solver Structure}
  311. In Figure~\ref{fig:simproc}
  312. \begin{figure}[h]
  313. \hrule
  314. \vspace{2mm}
  315. {\sf
  316. \begin{algorithmic}%[1]
  317. \STATE $\forall i \in C$: initialise $v(i)$
  318. \COMMENT {Initialize Cell Space}
  319. \IF {explicit boundary conditions}
  320. \STATE $\forall i \in$ boundary($C$): initialize $v(i)$
  321. \COMMENT {Boundary extension of $v()$}
  322. \ENDIF
  323. \IF {periodic boundary conditions}
  324. \STATE $\forall i \in C \cup$ boundary($C$):
  325. $v(i) \leftarrow v(i\ \textsf{mod}\ L)$
  326. \COMMENT {Modulo extension of $v()$; assume $0 \ldots L-1$ indexing}
  327. \ENDIF
  328. \FOR {$n := n_s$ to $n_f$}
  329. %\COMMENT {from start ``time'' $n_s$ to end ``time'' $n_f$}
  330. \STATE $\forall i \in C$: $v_{new}(i)
  331. \leftarrow
  332. \delta_l(v(\sigma_{NT}(i)))$
  333. \COMMENT {One-step state transition computation}
  334. \STATE $v \leftarrow v_{new}$
  335. \COMMENT {Switch value buffers}
  336. \STATE $n \leftarrow n+1$
  337. \COMMENT {Time Advancement}
  338. \ENDFOR
  339. \end{algorithmic}
  340. }
  341. \vspace{2mm}
  342. \hrule
  343. \caption{CA Simulation Procedure}
  344. \label{fig:simproc}
  345. \end{figure}
  346. the backbone algorithm of any cellular automata solver is given.
  347. As the definition requires synchronous calculation,
  348. whereby new values only depend on old values (and not on
  349. new values) of neighbouring cells, a second value function
  350. $v_{new}$ is needed to hold copies of the previous value.
  351. Note how a value function is usually efficiently implemented as
  352. a lookup in a value {\em array}.
  353. % \subsubsection{Improving Solver Performance}
  354. %
  355. % % Contributions by:
  356. % % Rudy Rucker <rucker@sjsumcs.SJSU.EDU>
  357. % % Richard Ottolini <stgprao@st.unocal.COM>
  358. % % J Dana Eckart <dana@rucs.faculty.cs.runet.edu>
  359. %
  360. % The performance of a Cellular Automaton solver is obviously
  361. % related to an appropriate choice of data structures and algorithms.
  362. % There are six main techniques \cite{Alife:FAQ} for
  363. % improving solver performance:
  364. % \begin{enumerate}
  365. %
  366. % \item Lookup table:
  367. %
  368. % generally, a cell takes on a value $v_{new}$ which is computed
  369. % on the basis of information in the cell's neighbourhood.
  370. % One may attempt to pack the neighbourhood value information bitwise
  371. % into an integer $neigh$ which can subsequently be used as an {\em index}
  372. % into a {\em lookup table}. The {\em lookup} table {\em encodes} the
  373. % local transition function $\delta_l$:
  374. % \[
  375. % v_{new}(i) = lookup[neigh].
  376. % \]
  377. % The {\em lookup} values are pre-computed from a $\delta_l$
  378. % specification before simulating the Cellular Automaton.
  379. % The {\em lookup} vector may also serve as an efficient means
  380. % of model storage.
  381. %
  382. % \item Neighbourhood shifting:
  383. %
  384. % in stepping through the cells, one repeatedly computes a cell's
  385. % $neigh$, then computes the $neigh$ of the next cell, and so on.
  386. % Because the neighbourhoods overlap, a lot of the
  387. % information in the next cell's $neigh$ is the same as in the old
  388. % cell's. With an appropriate representation, it is possible
  389. % to {\em left shift} out the old info and {\em OR} in the new info.
  390. %
  391. % \item Pointer swap:
  392. %
  393. % to run a CA, one needs two buffers, one for the current cell space
  394. % ($v$ at $t$), and one for the updated cell space ($v_{new}$
  395. % at $t+1$). After the update, the updated cell space should
  396. % {\em not} be {\em copied} into the current one
  397. % (though a na\"{\i}ve implementation of line 10 in
  398. % Figure~\ref{fig:simproc} would do so).
  399. % Assuming the value functions $v$ and $v_{new}$ are encoded
  400. % as arrays, swapping pointers is ${\cal O}(L^D)$ faster.
  401. %
  402. % \item Assembly language ``inner loop'':
  403. %
  404. % when processing a 2-D Cellular Automaton of \textsf{VGA}
  405. % size, the cell space has approximately $300,000$ cells.
  406. % This implies $neigh$ will be assembled and {\em lookup}
  407. % applied about $300,000$ times per time-step (one screen).
  408. % This means the {\em inner loop} must be as efficient as possible.
  409. % In particular, coding this in assembly language, reducing the
  410. % total number of clock cycles of the instructions in the inner
  411. % loop, can lead to a significant performance gain.
  412. %
  413. % \item Sparse \vs dense configurations:
  414. %
  415. % if the rule set is known to lead to {\em sparse} configurations,
  416. % as in the case of the Game of Life with a small initial pattern,
  417. % one can use {\em sparse matrix techniques}. That is, one can just compute
  418. % in the vicinity of occupied cells.
  419. % Generally, these techniques are not compiled as efficiently as a full
  420. % matrix method, because there is more indirect addressing and
  421. % branching. However, one can include both a sparse and full matrix
  422. % method in the same program, and switch when the {\em cross-over
  423. % density} is reached.
  424. %
  425. % \item Periodic boundary conditions:
  426. %
  427. % there are two basic methods for handling periodic boundary
  428. % conditions efficiently:
  429. % \begin{enumerate}
  430. % \item Coding for fast modulo arithmetic.
  431. %
  432. % The brute force method of doing modulo arithmetic on index variable
  433. % \verb|i| for a range of $0 \dots R-1$ in \verb|C|\ is
  434. % \begin{verbatim}
  435. % (i + offset) % R
  436. % \end{verbatim}
  437. % \vspace{-4mm}
  438. % On some architectures (\eg some Sun SPARC stations)
  439. % it is actually faster to do
  440. % \begin{verbatim}
  441. % register int tmp = i + offset;
  442. % (tmp >= R) ? tmp - R : tmp
  443. % \end{verbatim}
  444. % \vspace{-4mm}
  445. % if \verb|offset| is positive and similarly if it is negative. \\
  446. % If \verb|R| is a power of 2, better performance can be obtained by
  447. % means of
  448. % \begin{verbatim}
  449. % (i + offset) & R
  450. % \end{verbatim}
  451. % \vspace{-4mm}
  452. % when offset is positive and
  453. % \begin{verbatim}
  454. % (i + offset + R) & R
  455. % \end{verbatim}
  456. % \vspace{-4mm}
  457. % when offset is negative.
  458. % \item Using a larger array and copying the boundary cells
  459. % between iterations
  460. % \end{enumerate}
  461. % \end{enumerate}
  462. \subsection{Examples}
  463. The \href{http://www.santafe.edu/}{Santa Fe Institute} \cite{CAweb}
  464. hosts a plethora of Cellular Automata examples.
  465. The site is mainly devoted to the study of Artificial Life,
  466. one of the prominent uses of Cellular Automata.
  467. Artificial Life research tries to explain and reproduce, {\em ab initio},
  468. many physical and biological phenomena.
  469. \subsubsection{Simple 2-state, 1-D Cellular Automaton}
  470. Figure~\ref{fig:simplecell} demonstrates the simulation procedure
  471. \begin{figure}
  472. \postscript{figs/simplecell.eps}{1.0}
  473. \caption{2-state, 1-D Cellular Automaton of length 4}
  474. \label{fig:simplecell}
  475. \end{figure}
  476. for a simple 2-state ($\{0,1\}$), 1-D Cellular Automaton of length 4
  477. with periodic Boundary Conditions and initial condition $1011$.
  478. The local transition function (a 1-D version
  479. of the Game of Life) is \\
  480. \begin{tabular}{lll}
  481. $\delta_l$\ :\ & $000 \rightarrow 0$ & $100 \rightarrow 0$ \\
  482. & $001 \rightarrow 0$ & $101 \rightarrow 1$ \\
  483. & $010 \rightarrow 0$ & $110 \rightarrow 1$ \\
  484. & $011 \rightarrow 1$ & $111 \rightarrow 0$ \\
  485. \end{tabular}\\
  486. For a 1-D Cellular Automaton, ``animation'' of the cell space can
  487. be visualized by colour coding the cell values
  488. (here: $0$ by white and $1$ by black) and by mapping $t$
  489. onto the vertical axis which leads to the 2-D image in
  490. Figure~\ref{fig:simplecell}.
  491. \subsubsection{The Game of Life}
  492. Developed by Cambridge mathematician John Conway and
  493. popularized by Martin Gardner in his Mathematical Games
  494. column in Scientific American in 1970 \cite{Gardner:Life}, the
  495. Game of Life is one of the simplest examples of a Cellular Automaton.
  496. Each cell is either alive ($1$) or dead ($0$). To determine its status
  497. for the next time step, each cell counts the number of neighbouring cells
  498. which are alive. If the cell is alive and has 2 or 3 alive
  499. neighbours, then the cell is alive during the next time step.
  500. With fewer alive neighbours, a living cell dies of loneliness,
  501. with more, it dies of overcrowding.
  502. Many interesting patterns and behaviours have been investigated
  503. over the years.
  504. An example of a high-level {\em cellang}
  505. \cite{Eckart:cellular} specification (\ie $\delta_l$ is written
  506. implicitly) of the Cellular Automata model
  507. is given in Figure~\ref{fig:life}.
  508. \begin{figure}
  509. \hrule
  510. \vspace{2mm}
  511. \begin{verbatim}
  512. # 2 dimensional game of life
  513. 2 dimensions of 0..1
  514. sum := [ 0, 1] + [ 1, 1] + [ 1, 0] +
  515. [-1, 1] + [-1, 0] + [-1, -1] +
  516. [ 0, -1] + [ 1, -1]
  517. cell := 1 when (sum = 2 & cell = 1) | sum = 3
  518. := 0 otherwise
  519. \end{verbatim}
  520. \vspace{-0.7cm}
  521. \hrule
  522. \caption{``cellang'' specification of Conway's game of Life}
  523. \label{fig:life}
  524. \end{figure}
  525. In combination with boundary conditions and an initial condition,
  526. this specification allows for model solving.
  527. \subsection{Formalism Extensions}
  528. The Cellular Automata formalism can easily be extended
  529. in different ways:
  530. \begin{enumerate}
  531. \item Addition of {\em inputs}. The formalism as presented above
  532. is {\em autonomous}: there are no (non-trivial)
  533. inputs into the system.
  534. An intuitively appealing way of adding inputs is to associate
  535. an input with each cell. The input set $X$ will thus have
  536. a structure similar to that of the state set $S$.
  537. \item The requirement of having the same cell value set for each
  538. cell can be relaxed to obtain {\em heterogeneous} Cellular
  539. Automata whereby not necessarily $\forall i \in C: V_i = V$.
  540. The homogeneous case can always be emulated by constructing
  541. $V$ as the union of all individual cell value sets:
  542. \[
  543. V = \bigcup_{i \in C} V_i.
  544. \]
  545. \item The requirement of having the same local transition function
  546. $\delta_l$ for each cell can be relaxed to obtain {\em non-uniform}
  547. Cellular Automata.
  548. % Obviously, in that case, it is no longer possible
  549. % to use the performance-enhancing techniques described above.
  550. \item As is demonstrated in Figure~\ref{fig:life}, a modelling
  551. language may allow for high-level representations.
  552. Agents %\cite{CAagents}
  553. are a typical example of such a high-level
  554. construct. Here, $\delta_l$ is no longer specified explicitly.
  555. \end{enumerate}
  556. The ``grid of cells'' discretized representation of space can also
  557. be used for continuous models.
  558. In particular, the local dynamics of cells can be described by
  559. System Dynamics models rather than finite state automata.
  560. Simulation will be carried out by transforming
  561. the model to a flat DAE model and subsequently (numerically)
  562. solving that continuous model, given initial conditions.
  563. % implementation is however synchronous
  564. \section{The DEVS Formalism} \label{sec:DEVS}
  565. The DEVS formalism was conceived by Zeigler
  566. \cite{Zeigler:DEVS,zeigler:theory} to provide a rigorous common basis for
  567. discrete-event modelling and simulation.
  568. For the class of formalisms denoted as {\em discrete-event}
  569. \cite{Nance:TimeState}, system models are described at an abstraction
  570. level where the time base is continuous ($\mathbb{R}$), but during a bounded
  571. time-span, only a {\em finite number} of relevant {\em events} occur.
  572. These events can cause the state of the system to change.
  573. In between events, the state of the system does {\em not} change.
  574. This is unlike {\em continuous} models in which the state of the
  575. system may change continuously over time.
  576. Just as Cellular Automata, the DEVS formalism fits the
  577. general structure of deterministic, causal systems in classical
  578. systems theory mentioned in section~\ref{sec:CA}.
  579. DEVS allows for the description of system behaviour at two levels.
  580. At the lowest level, an {\em atomic DEVS} describes the autonomous
  581. behaviour of a discrete-event system as a sequence of deterministic
  582. transitions between sequential states as well as how it reacts to
  583. external input (events) and how it generates output (events).
  584. At the higher level, a {\em coupled DEVS} describes a system as
  585. a {\em network} of coupled components. The components can be atomic
  586. DEVS models or coupled DEVS in their own right. The connections denote
  587. how components influence each other. In particular, output events
  588. of one component can become, via a network connection, input events
  589. of another component. It is shown in \cite{Zeigler:DEVS} how the
  590. DEVS formalism is {\em closed under coupling}: for each coupled DEVS,
  591. a {\em resultant} atomic DEVS can be constructed. As such, any DEVS
  592. model, be it atomic or coupled, can be replaced by an equivalent
  593. atomic DEVS. The construction procedure of a resultant atomic DEVS is
  594. also the basis for the implementation of an {\em abstract simulator}
  595. or solver capable of simulating any DEVS model.
  596. As a coupled DEVS may have coupled DEVS components,
  597. {\em hierarchical} modelling is supported.
  598. In the following, the different aspects of the DEVS formalism are
  599. explained in more detail.
  600. \subsection{The atomic DEVS Formalism}
  601. The atomic DEVS formalism is a structure describing the different
  602. aspects of the discrete-event behaviour of a system:
  603. \[
atomic\ DEVS \equiv
  605. \langle
  606. S, ta, \delta_{int}, X, \delta_{ext}, Y, \lambda
  607. \rangle.
  608. \]
  609. The {\em time base} $T$ is continuous and is not mentioned
  610. explicitly
  611. \[
  612. T = \mathbb{R}.
  613. \]
  614. The {\em state set} $S$ is the set of admissible {\em sequential
  615. states}: the DEVS dynamics consists of an ordered sequence
  616. of states from $S$.
  617. Typically, $S$ will be a {\em structured} set (a product set)
  618. \[
  619. S = \times_{i=1}^n S_i.
  620. \]
  621. This formalizes multiple ($n$) {\em concurrent} parts of a system.
  622. It is noted how a structured state set is often synthesized from
  623. the state sets of concurrent components in a coupled DEVS model.
  624. The time the system {\em remains} in a sequential state
  625. before making a transition to the next sequential state
  626. is modelled by the {\em time advance function}
  627. \[
  628. ta: S \rightarrow \mathbb{R}^{+}_{\mbox{$0,+\infty$}}.
  629. \]
As time in the real world always advances, the image of $ta$ must consist of
non-negative numbers. $ta = 0$ allows for the representation of
  632. {\em instantaneous} transitions: no time elapses before transition
  633. to a new state. Obviously, this is an abstraction of reality which
  634. may lead to simulation {\em artifacts} such as infinite instantaneous
  635. loops which do not correspond to real physical behaviour.
  636. If the system is to stay in an end-state $s$ {\em forever},
  637. this is modelled by means of $ta(s) = +\infty$.
  638. The internal transition function
  639. \[
  640. \delta_{int}: S \rightarrow S
  641. \]
  642. models the transition from one state to the next sequential state.
  643. $\delta_{int}$ describes the behaviour of a Finite State Automaton;
  644. $ta$ adds the progression of time.
  645. It is possible to {\em observe} the system output. The output set $Y$
  646. denotes the set of admissible {\em outputs}.
  647. Typically, $Y$ will be a {\em structured} set (a product set)
  648. \[
  649. Y = \times_{i=1}^l Y_i.
  650. \]
  651. This formalizes multiple ($l$) output ports. Each port is identified
  652. by its unique index $i$.
  653. In a user-oriented modelling language, the indices would be derived
  654. from unique port {\em names}.
  655. The output function
  656. \[
  657. \lambda: S \rightarrow Y \cup \{\phi\}
  658. \]
  659. maps the internal state onto the output set. Output events are {\em
  660. only} generated by a DEVS model at the time of an {\em internal}
  661. transition. At that time, the state {\em before} the transition
  662. is used as input to $\lambda$. At all other times, the non-event
  663. $\phi$ is output.
  664. To describe the {\em total state} of the system at each point in
  665. time, the sequential state $s \in S$ is not sufficient. The
  666. {\em elapsed} time $e$ since the system made a transition to the
  667. current state $s$ needs also to be taken into account to construct the
  668. total state set
  669. \[
  670. Q = \{(s,e)| s \in S, 0 \leq e \leq ta(s)\}
  671. \]
  672. The elapsed time $e$ takes on values ranging from $0$ (transition
  673. just made) to $ta(s)$ (about to make transition to the next
  674. sequential state). Often, the {\em time left} $\sigma$ in a state
  675. is used:
  676. \[
  677. \sigma = ta(s) - e
  678. \]
Up to now, only an {\em autonomous} system was described: the system
receives no external inputs. To describe the reaction to external
inputs, the {\em input set} $X$ denoting
all admissible input values is defined.
  682. Typically, $X$ will be a {\em structured} set (a product set)
  683. \[
  684. X = \times_{i=1}^m X_i
  685. \]
  686. This formalizes multiple ($m$) input ports. Each port is identified
  687. by its unique index $i$. As with the output set, port indices may
  688. denote {\em names}.
  689. The set $\Omega$ contains all admissible input segments $\omega$
  690. \[
  691. \omega: T \rightarrow X \cup \{\phi\}
  692. \]
  693. In discrete-event system models, an input segment generates an input
  694. {\em event} different from the {\em non-event} $\phi$ only at a finite
  695. number of instants in a bounded time-interval.
These {\em external events}, inputs $x$ from $X$, cause the system to
  697. interrupt its autonomous behaviour and react in a way prescribed
  698. by the external transition function
  699. \[
  700. \delta_{ext}: Q \times X \rightarrow S
  701. \]
  702. The reaction of the system to an external event depends on the
  703. sequential state the system is in, the particular input {\em and}
  704. the elapsed time. Thus, $\delta_{ext}$ allows for the description of
  705. a large class of behaviours typically found in discrete-event
  706. models (including synchronization, preemption, suspension
  707. and re-activation).
  708. When an input event $x$ to an atomic model is not listed in the
  709. $\delta_{ext}$ specification, the event is {\em ignored}.
  710. In Figure~\ref{fig:DEVStraj},
  711. \begin{figure}
  712. \postscript{figs/devs.external.eps}{1}
  713. \caption{State Trajectory of a DEVS-specified Model}
  714. \label{fig:DEVStraj}
  715. \end{figure}
  716. an example state trajectory is given for an atomic DEVS model.
  717. In the figure, the system made an internal transition to
  718. state $s2$. In the absence of external input events, the
  719. system stays in state $s2$ for a duration $ta(s2)$. During
  720. this period, the elapsed time $e$ increases from $0$ to
  721. $ta(s2)$, with the total state $= (s2,e)$. When the elapsed time
  722. reaches $ta(s2)$, first an output is generated: $y2=\lambda(s2)$,
  723. then the system transitions instantaneously to the new state
  724. $s4=\delta_{int}(s2)$. In autonomous mode, the system would stay
  725. in state $s4$ for $ta(s4)$ and then transition (after generating
  726. output) to $s1=\delta_{int}(s4)$. Before $e$ reaches $ta(s4)$
  727. however, an external input event $x$ arrives. At that time, the
  728. system forgets about the scheduled internal transition and
  729. transitions to $s3=\delta_{ext}((s4,e),x)$. Note how an external
  730. transition does not give rise to an output. Once in state $s3$,
  731. the system continues in autonomous mode.
  732. \subsection{The coupled DEVS Formalism}
  733. The coupled DEVS formalism describes a discrete-event
  734. system in terms of a network of coupled components.
  735. \[
coupled\ DEVS \equiv
  737. \langle
  738. X_{self}, Y_{self}, D, \{M_i\}, \{I_i\}, \{Z_{i,j}\}, select
  739. \rangle
  740. \]
  741. The component $self$ denotes the coupled model itself.
  742. $X_{self}$ is the (possibly structured) set of allowed external
  743. inputs to the coupled model.
  744. $Y_{self}$ is the (possibly structured) set of allowed (external)
  745. outputs of the coupled model. $D$ is a set of unique component
  746. references (names). The coupled model itself is referred to by
  747. means of $self$, a unique reference not in $D$.
  748. The set of {\em components} is
  749. \[
  750. \{M_i | i \in D\}.
  751. \]
  752. Each of the components must be an atomic DEVS
  753. \[
  754. M_i = \langle
  755. S_i, ta_i, \delta_{int,i},
  756. X_i, \delta_{ext,i}, Y_i, \lambda_i
  757. \rangle, \forall i \in D.
  758. \]
  759. The set of {\em influencees} of a component,
  760. the components influenced by $i \in D \cup \{self\}$,
  761. is $I_i$. The set of all influencees describes the coupling
  762. network structure
  763. \[
  764. \{I_i | i \in D \cup \{self\}\}.
  765. \]
  766. For modularity reasons, a component (including $self$) may not
influence components outside its scope---the coupled model---,
  768. rather only other components of the coupled model,
  769. or the coupled model $self$:
  770. \[
  771. \forall i \in D \cup \{self\}: I_i \subseteq D \cup \{self\}.
  772. \]
  773. This is further restricted by the requirement that none of the
  774. components (including $self$) may influence themselves directly as this
  775. could cause an instantaneous dependency cycle (in case of a $0$
  776. time advance inside such a component) akin to an algebraic loop
  777. in continuous models:
  778. \[
  779. \forall i \in D \cup \{self\}: i \notin I_i.
  780. \]
  781. Note how one can always encode a self-loop ($i \in I_i$)
  782. in the internal transition function.
  783. To translate an output event of one component (such as a departure of
  784. a customer) to a corresponding input event (such as the arrival of
  785. a customer) in influencees of that component,
  786. {\em output-to-input translation functions} $Z_{i,j}$ are defined:
  787. \[
  788. \{Z_{i,j} | i \in D \cup \{self\}, j \in I_i\},
  789. \]
  790. \[
  791. \begin{array}{rcll}
  792. Z_{self,j} &: &X_{self} \rightarrow X_j &, \forall j \in D,\\
  793. Z_{i,self} &: &Y_i \rightarrow Y_{self} &, \forall i \in D,\\
  794. Z_{i,j} &: &Y_i \rightarrow X_j &, \forall i,j \in D.
  795. \end{array}
  796. \]
  797. Together, $I_i$ and $Z_{i,j}$ completely specify the coupling
  798. (structure and behaviour).
  799. As a result of coupling of concurrent components, multiple state
  800. transitions may occur at the same simulation time.
  801. This is an artifact of the discrete-event abstraction and may lead to
  802. behaviour not related to real-life phenomena.
  803. A logic-based foundation to study the {\em semantics} of these
  804. artifacts was introduced by Radiya and Sargent
  805. \cite{Radiya:logicFound}.
  806. In sequential simulation systems, such transition {\em collisions}
  807. are resolved by means of some form of {\em selection} of which of
  808. the components' transitions should be handled first. This corresponds
  809. to the introduction of priorities in some simulation languages.
  810. The coupled DEVS formalism explicitly represents a $select$ function
  811. for {\em tie-breaking} between simultaneous events:
  812. \[
  813. select: 2^D \rightarrow D
  814. \]
  815. $select$ chooses a unique component from any non-empty
  816. subset $E$ of $D$:
  817. \[
  818. select(E) \in E.
  819. \]
  820. The subset $E$ corresponds to the set of all components having
  821. a state transition simultaneously.
  822. % \subsection{Closure of DEVS under coupling} \label{sec:DEVSclosure}
  823. %
  824. % As mentioned at the start of section~\ref{sec:DEVS},
  825. % it is possible to construct a {\em resultant}
  826. % atomic DEVS model for each coupled DEVS. This
  827. % {\em closure under coupling} of atomic DEVS models makes {\em any}
  828. % coupled DEVS equivalent to an atomic DEVS. By induction,
  829. % any {\em hierarchically} coupled DEVS can thus be flattened to an
  830. % atomic DEVS. As a result, the requirement that each of the components
  831. % of a coupled DEVS be an atomic DEVS can be relaxed to be atomic {\em
  832. % or} coupled as the latter can always be replaced by an equivalent
  833. % atomic DEVS.
  834. %
  835. % The core of the closure procedure is the selection of the most
  836. % {\em imminent} (\ie soonest to occur) event from all the components'
  837. % scheduled events \cite{Zeigler:DEVS}. In case of simultaneous
  838. % events, the $select$ function is used. In the sequel,
  839. % the resultant construction is described.
  840. %
  841. % From the coupled DEVS
  842. % \[
  843. % \langle
  844. % X_{self}, Y_{self}, D, \{M_i\}, \{I_i\}, \{Z_{i,j}\}, select
  845. % \rangle,
  846. % \]
  847. % with all components $M_i$ atomic DEVS models
  848. % \[
  849. % M_i = \langle
  850. % S_i, ta_i, \delta_{int,i}, X_i, \delta_{ext,i},
  851. % Y_i, \lambda_i
  852. % \rangle, \forall i \in D
  853. % \]
  854. % the atomic DEVS
  855. % \[
  856. % \langle
  857. % S, ta, \delta_{int}, X, \delta_{ext}, Y, \lambda
  858. % \rangle
  859. % \]
  860. % is constructed.
  861. %
  862. % The resultant set of sequential states
  863. % is the product of the total state sets of all the components
  864. % \[
  865. % S = \times_{i \in D} Q_i,
  866. % \]
  867. % where
  868. % \[
  869. % Q_i = \{(s_i, e_i)| s \in S_i, 0 \leq e_i \leq ta_i(s_i)\},
  870. % \forall i \in D.
  871. % \]
  872. % The time advance function $ta$
  873. % \[
  874. % ta: S \rightarrow \mathbb{R}^{+}_{\mbox{$0,+\infty$}}
  875. % \]
  876. % is constructed by selecting the {\em most imminent} event time, of all
  877. % the components. This means, finding the smallest time {\em remaining}
  878. % until internal transition, of all the components
  879. % \[
  880. % ta(s) = min \{\sigma_i = ta_i(s_i) - e_i|i \in D\}.
  881. % \]
  882. % A number of {\em imminent} components may be scheduled for a
  883. % {\em simultaneous} internal transition. These components are
  884. % collected in a set
  885. % \[
  886. % IMM(s) = \{i \in D| \sigma_i=ta(s)\}.
  887. % \]
  888. % From $IMM$, a set of elements of $D$, {\em one} component $i^*$
  889. % is chosen by means of the $select$ {\em tie-breaking} function of
  890. % the coupled model
  891. % \[
  892. % \begin{array}{lcccc}
  893. % select &:& 2^D & \rightarrow & D\\
  894. % & & IMM(s) & \rightarrow & i^*
  895. % \end{array}
  896. % \]
  897. % %
  898. % % in parallel DEVS: no select, rather process a bag of events
  899. % %
  900. % Output of the selected component is generated {\em before}
  901. % it makes its internal transition. Note also how, as in a
  902. % Moore machine %\cite{FSA}
  903. % , input does not directly influence output.
  904. % In DEVS models, {\em only} an internal transition produces output.
  905. % An input can only influence/generate output via an internal
  906. % transition similar to the presence of
  907. % {\em memory} in the form of integrating elements in continuous models.
  908. % Allowing an external transition to produce output could lead to
  909. % infinite instantaneous loops. This is equivalent to algebraic loops
  910. % in continuous systems.
  911. % The output of the component is translated into coupled model output
  912. % by means of the coupling information
  913. % \[
  914. % \lambda(s) = Z_{i^*,self}(\lambda_{i^*}(s_{i^*})),
  915. % \textrm{if}\ self \in I_{i^*}.
  916. % \]
  917. % If the output of $i^*$ is not connected to the output of the coupled
  918. % model, the non-event $\phi$ can be generated as output of the coupled
  919. % model. As $\phi$ literally stands for no event, the output can also
  920. % be ignored without changing the meaning (but increasing performance
  921. % of simulator implementations).
  922. %
  923. % The internal transition function transforms the different
  924. % parts of the total state as follows:
  925. % \[
  926. % \delta_{int}(s) = ( \ldots, (s'_j,e'_j), \ldots),\ \textrm{where}
  927. % \]
  928. % \[
  929. % \begin{array}{lcll}
  930. % (s'_j,e'_j)
  931. % & = & (\delta_{int,j}(s_j),0)
  932. % & , \textrm{for}\ j=i^*,\\
  933. % & = & (\delta_{ext,j}(s_j,e_j+ta(s),Z_{i^*,j}(\lambda_{i^*}(s_{i^*}))),0)
  934. % & , \textrm{for}\ j \in I_{i^*},\\
  935. % & = & (s_j, e_j+ta(s))
  936. % & , \textrm{otherwise}.
  937. % \end{array}
  938. %% \begin{array}{lcl}
  939. %% (s'_j,e'_j)
  940. %% &=&(\delta_{int,j}(s_j),0)
  941. %% ,\textrm{for}\ j=i^*,\\
  942. %% &=&(\delta_{ext,j}(s_j,e_j+ta(s),Z_{i^*,j}(\lambda_{i^*}(s_{i^*}))),0)
  943. %% ,\textrm{for}\ j \in I_{i^*},\\
  944. %% &=&(s_j, e_j+ta(s))
  945. %% ,\textrm{otherwise}.
  946. %% \end{array}
  947. % \]
  948. % The selected imminent component $i^*$ makes an internal transition
  949. % to sequential state $\delta_{int,i^*}(s_{i^*})$.
  950. % Its elapsed time is reset to $0$.
  951. % All the influencees of $i^*$ change their state due to an external
  952. % transition prompted by an input which is the output-to-input
  953. % translated output of $i^*$, with an elapsed time adjusted
  954. % for the time advance $ta(s)$. The influencees' elapsed time is reset to $0$.
  955. % Note how $i^*$ is not allowed to be an influencee of $i^*$ in DEVS.
  956. % The state of all other components is not affected and their elapsed time
  957. % is merely adjusted for the time advance $ta(s)$.
  958. %
  959. % The external transition function transforms the different
  960. % parts of the total state as follows:
  961. % \[
  962. % \delta_{ext}(s,e,x) = ( \ldots, (s'_i,e'_i), \ldots),\ \textrm{where}
  963. % \]
  964. % \[
  965. % \begin{array}{lcll}
  966. % (s'_i,e'_i)
  967. % & = & (\delta_{ext,i}(s_i,e_j+e,Z_{self,i}(x)),0)
  968. % & , \textrm{for}\ i \in I_{self},\\
  969. % & = & (s_i, e_j+e)
  970. % & , \textrm{otherwise}.
  971. % \end{array}
  972. % \]
  973. % An incoming external event is routed, with an adjustment for elapsed time,
  974. % to each of the components connected to the coupled model input (after
  975. % the appropriate input-to-input translation).
  976. % For all those components, the elapsed time is reset to $0$.
  977. % All other components are not affected and only the elapsed time
  978. % is adjusted.
  979. %
  980. % Some limitations of DEVS are that
  981. % \begin{itemize}
  982. % \item a conflict due to simultaneous internal and external events
  983. % is resolved by ignoring the internal event. It should be
  984. % possible to explicitly specify behaviour in case of conflicts;
  985. % \item there is limited potential for parallel implementation;
  986. % \item the $select$ function is an artificial legacy of the semantics
  987. % of traditional sequential simulators based on an event list;
  988. % \item it is not possible to describe variable structure.
  989. % \end{itemize}
  990. % Some of these are compensated for in parallel DEVS (see further).
  991. \subsection{Implementation of a DEVS Solver}
  992. The algorithm in Figure~\ref{fig:DEVSsolver} is based on
  993. the closure under coupling construction
  994. % of section~\ref{sec:DEVSclosure}
  995. and can be used as a specification of a --possibly parallel--
  996. implementation of a DEVS solver or ``abstract simulator''
  997. \cite{Zeigler:DEVS, Kim:distrDEVS}.
  998. \begin{figure*}
  999. {\sf
  1000. \begin{center}
  1001. \begin{tabular}{lll}\toprule
  1002. \textbf{message} $m$&
  1003. \multicolumn{1}{c}{\textbf{simulator}} &
  1004. \multicolumn{1}{c}{\textbf{coordinator}}\\
  1005. \midrule
  1006. \addlinespace
  1007. $(*, from, t)$
  1008. & \multicolumn{2}{c}{simulator correct only if $t = t_N$}\\
  1009. \addlinespace
  1010. & $y \leftarrow \lambda(s)$
  1011. & \textbf{send} $(*, self, t)$ to $i^*$, where\\
  1012. & \textbf{if} $y \neq \phi:$
  1013. & \ \ \ \ $i^* = select(imm\_children)$\\
  1014. & \ \ \ \ \textbf{send} $(\lambda(s), self, t)$ to $parent$
  1015. & \ \ \ \ $imm\_children = \{i \in D| M_i.t_N = t\}$\\
  1016. % $t_N$ should be $= t^* = min \{m.t_N| m \in M\}
  1017. & $s \leftarrow \delta_{int}(s)$
  1018. & $active\_children \leftarrow active\_children \cup \{i^*\}$\\
  1019. & $t_L \leftarrow t$
  1020. & \\
  1021. & $t_N \leftarrow t_L + ta(s)$
  1022. & \\
  1023. & \textbf{send} $(done, self, t_N)$ to $parent$
  1024. & \\
  1025. \addlinespace
  1026. \midrule
  1027. \addlinespace
  1028. $(x, from, t)$
  1029. & \multicolumn{2}{c}{simulator correct only if $t_L \leq t \leq t_N$
  1030. (ignore $\delta_{int}$ to resolve a $t = t_N$ {\em conflict})}\\
  1031. \addlinespace
  1032. & $e \leftarrow t - t_L$
  1033. & $\forall i \in I_{self}:$\\
  1034. & $s \leftarrow \delta_{ext}(s,e,x)$
  1035. & \ \ \ \ \textbf{send} $(Z_{self,i}(x), self, t)$ to $i$\\
  1036. & $t_L \leftarrow t$
  1037. & \ \ \ \ $active\_children \leftarrow active\_children \cup \{i\}$\\
  1038. & $t_N \leftarrow t_L + ta(s)$
  1039. & \\
  1040. & \textbf{send} $(done, self, t_N)$ to $parent$
  1041. & \\
  1042. \addlinespace
  1043. \midrule
  1044. \addlinespace
  1045. $(y, from, t)$
  1046. & & $\forall i \in I_{from} \setminus \{self\}:$\\ % $from = i^*$
  1047. & & \ \ \ \ \textbf{send} $(Z_{from,i}(y), from, t)$ to $i$\\
  1048. & & \ \ \ \ $active\_children \leftarrow active\_children \cup \{i\}$\\
  1049. & & \textbf{if} $self \in I_{from}:$\\
  1050. & & \ \ \ \ \textbf{send} $(Z_{from, self}(y), self, t)$ to $parent$\\
  1051. \addlinespace
  1052. \midrule
  1053. \addlinespace
  1054. $(done, from, t)$
  1055. & & $active\_children \leftarrow active\_children \setminus \{from\}$\\
  1056. & & \textbf{if} $active\_children = \emptyset$:\\
  1057. & & \ \ \ \ $t_L \leftarrow t$\\
  1058. & & \ \ \ \ $t_N \leftarrow min \{M_i.t_N| i \in D\}$\\
  1059. % & & \ \ \ \ put children with minimum $t_N$ in $imm\_children$\\ ????
  1060. & & \ \ \ \ \textbf{send} $(done, self, t_N)$ to $parent$\\
  1061. \addlinespace
  1062. \bottomrule
  1063. \end{tabular}
  1064. \end{center}
  1065. }
  1066. \caption{DEVS Simulation Procedure}
  1067. \label{fig:DEVSsolver}
  1068. \end{figure*}
  1069. In an atomic DEVS solver, the last event time $t_L$ as well as the
  1070. local state $s$ are kept. In a coordinator, only the last event time $t_L$
  1071. is kept. The next-event-time $t_N$ is sent as output of either
  1072. solver. It is possible to also keep $t_N$ in the solvers. This
  1073. requires consistent (recursive) initialization of the $t_N$s.
  1074. If kept, the $t_N$ allows one to check whether the solvers are
  1075. appropriately synchronized.
  1076. The operation of an abstract simulator involves handling four
  1077. types of messages.
  1078. The $(x, from, t)$ message carries external input information.
  1079. The $(y, from, t)$ message carries external output information.
  1080. The $(*, from, t)$ and $(done, from, t_N)$ messages are used for
  1081. scheduling (synchronizing) the abstract simulators.
  1082. In these messages, $t$ is the simulation time
  1083. and $t_N$ is the next-event-time.
  1084. The $(*, from, t)$ message indicates an internal event $*$ is due.
  1085. When a coordinator receives a $(*, from, t)$ message, it selects
  1086. an imminent component $i^*$ by means of the tie-breaking function
  1087. $select$ specified for the coupled model and routes the message to
  1088. $i^*$. Selection is necessary as there may be more than one imminent
  1089. component (with minimum next remaining time).\\
  1090. When an atomic simulator
  1091. receives a $(*, from, t)$ message, it generates an output message
  1092. $(y, from, t)$ based on the old state $s$.
  1093. It then computes the new state by means of the internal
  1094. transition function. Note how in DEVS, output messages
  1095. are only produced while executing internal events.
  1096. When a simulator outputs a $(y, from, t)$ message, it is sent
  1097. to its parent coordinator.
  1098. The coordinator sends the output, after appropriate
  1099. output-to-input translation, to each of the influencees of $i^*$
  1100. (if any). If the coupled model itself is an influencee of $i^*$,
  1101. the output, after appropriate output-to-output translation, is sent
to the coupled model's parent coordinator.
  1103. When a coordinator receives an $(x, from, t)$ message from its parent
  1104. coordinator, it routes the message, after appropriate input-to-input
  1105. translation, to each of the affected components. \\
  1106. When an atomic simulator receives an $(x, from, t)$ message, it executes
  1107. the external transition function of its associated atomic model.\\
  1108. After executing an $(x, from, t)$ or $(y, from, t)$ message, a
  1109. simulator sends a $(done, from, t_N)$ message to its parent
  1110. coordinator to prepare a new schedule. When a coordinator has
  1111. received $(done, from, t_N)$ messages from all its components, it sets
  1112. its next-event-time $t_N$ to the minimum $t_N$ of all its components and
  1113. sends a $(done, from, t_N)$ message to its parent coordinator.
  1114. This process is recursively applied until the top-level coordinator or
  1115. {\em root coordinator} receives a $(done, from, t_N)$ message.
  1116. As the simulation procedure is synchronous, it does not support
asynchronously arriving (real-time) external input.
  1118. Rather, the environment or Experimental Frame should also be
  1119. modelled as a DEVS component.
  1120. To run a simulation experiment, the {\em initial conditions} $t_L$
  1121. and $s$ must first be set in {\em all} simulators of the hierarchy.
  1122. If $t_N$ is kept in the simulators, it must be recursively set too.
  1123. Once the initial conditions are set, the main loop described in
  1124. Figure~\ref{fig:mainDEVSloop} is executed.
  1125. \begin{figure}
  1126. {\sf
  1127. \begin{center}
  1128. \begin{tabular}{l}\toprule
  1129. \addlinespace
  1130. $t \leftarrow t_N$ of topmost coordinator\\
  1131. \textbf{repeat until} $t = t_{end}$\\
  1132. \ \ \ \ \textbf{send} $(*, meta, t)$ to topmost coupled model $top$\\
  1133. \ \ \ \ \textbf{wait} for $(done, top, t_N)$\\
  1134. \ \ \ \ $t \leftarrow t_N$\\
  1135. \addlinespace
  1136. \bottomrule
  1137. \end{tabular}
  1138. \end{center}
  1139. }
  1140. \caption{DEVS Simulation Procedure Main Loop}
  1141. \label{fig:mainDEVSloop}
  1142. \end{figure}
  1143. % The above algorithm can be simplified for sequential implementation.
  1144. % Figure~\ref{fig:seqDEVSsolver} is a specification for an
  1145. % object-oriented sequential implementation.
  1146. % Atomic and coupled DEVS simulators are implemented as {\em classes}.
  1147. % Subclassing (inheritance) is used to add model information.
  1148. % Instantiated {\em objects} correspond to each of the (sub-)model's
  1149. % and their abstract simulators.
  1150. % In a sequential implementation, sending a message to an object
  1151. % is implemented by calling the object's $receive()$ method.
  1152. % This method returns the response message
  1153. % after sequential processing of the message.
  1154. % Messages are of the general form $(type, from, t)$.
  1155. % \begin{figure*}
  1156. % {\sf
  1157. % \begin{center}
  1158. % \begin{tabular}{lll}\toprule
  1159. % \textbf{receive}$(m)$&
  1160. % \multicolumn{1}{c}{\textbf{simulator}} &
  1161. % \multicolumn{1}{c}{\textbf{coordinator}}\\
  1162. % \midrule
  1163. % \addlinespace
  1164. % $(*, from, t)$
  1165. % & \multicolumn{2}{c}{simulator correct only if $t = t_N$}\\
  1166. % \addlinespace
  1167. % & $y \leftarrow \lambda(s)$
  1168. % & $(y, i^*, t_N) \leftarrow i^*.receive((*, self, t))$, where\\
  1169. % & $s \leftarrow \delta_{int}(s)$
  1170. % & \ \ \ \ $i^* = select(imm\_children),$\\
  1171. % & $t_L \leftarrow t$
  1172. % & \ \ \ \ $imm\_children = \{i \in D| M_i.t_N = t\}$\\
  1173. % % $t_N$ should be $= t^* = min \{m.t_N| m \in M\}
  1174. % & $t_N \leftarrow t_L + ta(s)$
  1175. % & $t_L \leftarrow t$\\
  1176. % & \textbf{return} $(y, self, t_N)$
  1177. % & \textbf{if} $y = \phi$:\\
  1178. % & & \ \ \ \ \textbf{return} $(done, self, t_N)$ \\
  1179. % & & \textbf{else}\\
  1180. % & & \ \ \ \ $\forall i \in I_{i^*} \setminus \{self\}:$\\
  1181. % & & \ \ \ \ \ \ \ \ $(done, i, i.t_N) \leftarrow
  1182. % i.receive((Z_{from,i}(y), i^*, t))$\\
  1183. % & & \ \ \ \ $t_N \leftarrow min \{i.t_N| i \in D\}$\\
  1184. % & & \ \ \ \ \textbf{if} $self \in I_{from}:$ \\
  1185. % & & \ \ \ \ \ \ \ \ \textbf{return} $(Z_{i^*, self}(y), self, t_N)$\\
  1186. % & & \ \ \ \ \textbf{else}\\
  1187. % & & \ \ \ \ \ \ \ \ \textbf{return} $(done, self, t_N)$ \\
  1188. % \addlinespace
  1189. % \midrule
  1190. % \addlinespace
  1191. % $(x, from, t)$
  1192. % & \multicolumn{2}{c}{simulator correct only if $t_L \leq t \leq t_N$
  1193. % (ignore $\delta_{int}$ to resolve a $t = t_N$ {\em conflict})}\\
  1194. % \addlinespace
  1195. % & $e \leftarrow t - t_L$
  1196. % & $\forall i \in I_{self}:$\\
  1197. % & $s \leftarrow \delta_{ext}(s,e,x)$
  1198. % & \ \ \ \ $(done, n, t_N)
  1199. % \leftarrow i.receive((Z_{self,i}(x), self, t))$\\
  1200. % & $t_L \leftarrow t$
  1201. % & $t_L \leftarrow t$\\
  1202. % & $t_N \leftarrow t_L + ta(s)$
  1203. % & $t_N \leftarrow min \{i.t_N| i \in D\}$\\
  1204. % & \textbf{return} $(done, self, t_N)$
  1205. % & \textbf{return} $(done, self, t_N)$\\
  1206. % \addlinespace
  1207. % \bottomrule
  1208. % \end{tabular}
  1209. % \end{center}
  1210. % }
  1211. % \caption{Sequential DEVS Simulation Procedure}
  1212. % \label{fig:seqDEVSsolver}
  1213. % \end{figure*}
  1214. % Simulation is achieved by repeatedly sending $(*, meta, t_N)$ messages
  1215. % to the root coordinator as in Figure~\ref{fig:mainDEVSloop}.
  1216. % % HV: add detailed description
  1217. % % HV: implement and test in Python
  1218. \section{The parallel DEVS Formalism}
  1219. As DEVS is a formalization and generalization of sequential
  1220. discrete-event simulator semantics, it does not allow for drastic
  1221. parallelization. In particular, simultaneously occurring internal
  1222. transitions are serialized by means of a tie-breaking $select$
  1223. function. Also, in case of {\em collisions} between simultaneously
  1224. occurring internal transitions and external input, DEVS
  1225. ignores the internal transition and applies the external transition
  1226. function. Chow \cite{Chow:parallelDEVS} proposed the parallel DEVS
  1227. (P-DEVS) formalism which alleviates some of the DEVS drawbacks.
  1228. In an atomic P-DEVS
  1229. \[
  1230. atomic\ P-DEVS \equiv
  1231. \langle
  1232. S, ta, \delta_{int}, X, \delta_{ext}, \delta_{conf}, Y, \lambda
  1233. \rangle,
  1234. \]
  1235. the model can explicitly define collision behaviour by using
  1236. a so-called {\em confluent} transition function $\delta_{conf}$.
  1237. Only $\delta_{ext}$, $\delta_{conf}$, and $\lambda$ are different
  1238. from DEVS.
  1239. The external transition function
  1240. \[
  1241. \delta_{ext}: Q \times X^b \rightarrow S
  1242. \]
  1243. now accepts a {\em bag} of simultaneous inputs, elements of
  1244. the input set $X$, rather than a single input.
  1245. The confluent transition function
  1246. \[
  1247. \delta_{conf}: S \times X^b \rightarrow S
  1248. \]
  1249. describes the state transition when a scheduled internal state
  1250. transition and simultaneous external inputs collide.
  1251. An atomic P-DEVS model can generate multiple simultaneous outputs
  1252. \[
  1253. \lambda: S \rightarrow Y^b
  1254. \]
  1255. in the form of a bag of output values from the output set $Y$.
  1256. As conflicts are handled explicitly in the confluent transition
  1257. function, the $select$ tie-breaking function can be eliminated
  1258. from the coupled DEVS structure:
  1259. \[
  1260. coupled\ P-DEVS \equiv
  1261. \langle
X_{self}, Y_{self}, D, \{M_i\}, \{I_i\}, \{Z_{i,j}\}
  1263. \rangle.
  1264. \]
  1265. In this structure, all components $M_i$ are atomic P-DEVS
  1266. \[
  1267. M_i =
  1268. \langle
  1269. S_i, ta_i, \delta_{int,i}, X_i, \delta_{ext,i},
  1270. \delta_{conf,i}, Y_i, \lambda_i
  1271. \rangle, \forall i \in D.
  1272. \]
  1273. For the proof of closure under coupling of P-DEVS as well as
  1274. for the description of an efficient parallel abstract simulator,
  1275. see \cite{Chow:parallelDEVS}.
  1276. \section{Formalism Transformation}
Cellular Automata are a simple form of discrete-event models,
and of DEVS models in particular.
  1279. Describing a Cellular Automaton as a simple atomic DEVS is thus
  1280. straightforward. Thanks to the general nature of DEVS,
  1281. all extensions of the Cellular Automata formalism mentioned before
  1282. can also be mapped onto DEVS.
  1283. It is more rewarding however to map a CA onto a coupled model,
  1284. whereby every CA cell's dynamics is represented as an
  1285. atomic model and the dependency between one cell and
  1286. its neighbourhood is represented by the coupled model's
  1287. coupling information.
  1288. In what follows, a CA is mapped onto a coupled P-DEVS as this
  1289. mapping is more elegant than that onto the original DEVS.
  1290. In addition, the resultant parallel DEVS holds more
  1291. potential for parallel implementation.
  1292. The coupled parallel DEVS representation presented
here corresponds to the Cellular Automaton specification
  1294. in section~\ref{sec:CA}:
  1295. \[
  1296. P-DEVS-CA = \langle
  1297. X_{self}, Y_{self}, D, \{M_i\}, \{I_i\}, \{Z_{i,j}\}
  1298. \rangle.
  1299. \]
  1300. As the Cellular Automaton in section~\ref{sec:CA} did not incorporate
  1301. external input,
  1302. \[
  1303. X_{self} = \emptyset.
  1304. \]
  1305. Typical output of the Cellular Automaton consists of all the
  1306. cell values
  1307. \[
  1308. Y_{self} = \times_{i \in D} V.
  1309. \]
  1310. Components in the coupled parallel DEVS model correspond to
  1311. CA cells. Indexing uses the CA index set:
  1312. \[
  1313. D = C.
  1314. \]
  1315. The $\{M_i\}$ are atomic P-DEVS components, described in more detail later.
  1316. The set of influencees of a component/cell $i$ is constructed
  1317. by means of the CA's neighbourhood template $NT$.
  1318. The $NT$ contains influencer rather than influencee information.
  1319. Thus, the offset information needs to be mirrored with respect to the origin
  1320. (\ie its inverse with respect to addition of offsets needs to
  1321. be calculated) to obtain the influencees of component $i$:
  1322. \[
  1323. I_i = \{j \in C |j=i-offset,\ offset \in
  1324. \{NT[k] |k = 1, \ldots, \xi\}
  1325. \setminus \{0\}
  1326. \}
  1327. \]
  1328. As DEVS does not allow $i \in I_i$, the offset $0$, if
  1329. present in $NT$, is not included.
  1330. A state transition of a CA cell usually does depend on the
  1331. old state of the cell itself. This is encoded in the atomic
  1332. P-DEVS's (confluent) transition function rather than by means of
  1333. an external self-coupling.
  1334. Note how $j=i-offset \in C$ may need to be relaxed depending
  1335. on the particular boundary conditions. Cells outside $C$ may need to
  1336. be considered (and initialized).
  1337. As there is no external input, the input-to-input translation
  1338. $Z_{self,i}$ is not needed.
  1339. The $i,j$ output-to-input translation converts the output of a cell
  1340. (\ie the cell's value) into a tuple containing that value and
  1341. the offset between the two cells:
  1342. \[
  1343. \begin{array}{lcccc}
  1344. Z_{i,j} &:& Y_i &\rightarrow& X_j\\
  1345. & & s_i &\rightarrow& (s_i, i-j).
  1346. \end{array}
  1347. \]
  1348. The output-to-output translation translates the output of each
  1349. cell into a tuple with the output in the position corresponding
  1350. to the cell's index:
  1351. \[
  1352. \begin{array}{lcccc}
  1353. Z_{i,self} &:& Y_i &\rightarrow& Y_{self}\\
  1354. & & s_i &\rightarrow& (\ldots, s_i, \ldots).
  1355. \end{array}
  1356. \]
  1357. Individual cells are mapped onto atomic P-DEVS components:
  1358. \[
  1359. M_i = \langle
  1360. S_i, ta_i, \delta_{int,i},
X_i, \delta_{ext,i}, \delta_{conf,i}, Y_i, \lambda_i
  1362. \rangle, \forall i \in D.
  1363. \]
  1364. Values of the components/cells are those from the cell value set
  1365. \[
  1366. S_i = V.
  1367. \]
  1368. The time advance is set to the same arbitrary non-zero value $\Delta$
  1369. for all cells to allow for synchronous operation:
  1370. \[
  1371. ta_i(s_i) = \Delta.
  1372. \]
  1373. The internal transition function does not modify the component's
  1374. state
  1375. \[
\delta_{int,i}(s_i) = s_i.
  1377. \]
The output function sends out a bag containing only the cell value:
\[
\lambda_i(s_i) = \{s_i\},\ \textrm{where}\ s_i \in Y_i = V.
\]
  1382. The external transition function is not used as there is
  1383. no global external input into the CA. Due to the synchronous
  1384. operation, whereby internal transitions and external inputs
  1385. will always collide, {\em only} the confluent transition function
  1386. is used.
  1387. \[
  1388. \delta_{conf,i}(s_i, e_i, x_i^b) = \delta_l(v_i),
  1389. \]
  1390. where $v_i$ is a vector with the same dimensions as the
  1391. neighbourhood template $NT$ with values
  1392. \[
  1393. % \begin{array}{rcll}
  1394. % v_i[\eta] &=& s_i,
  1395. % &\textrm{for the}\ \eta\ \textrm{for which}\ NT[\eta]=0; \\
  1396. % v_i[proj_{offset}(x)] &=& proj_{value}(x),
  1397. % &\forall x \in x_i^b.
  1398. % \end{array}
  1399. \begin{array}{rcl}
  1400. v_i[\eta] &=& s_i,\
  1401. \textrm{for the}\ \eta\ \textrm{for which}\ NT[\eta]=0; \\
  1402. v_i[proj_{offset}(x)] &=& proj_{value}(x),\
  1403. \forall x \in x_i^b.
  1404. \end{array}
  1405. \]
  1406. Messages communicate values of neighbouring cells, as well as
  1407. offsets from the current cell:
  1408. \[
X_i = \{(v,offset)| v \in V, offset \in C\}.
  1410. \]
As an example of the above mapping, Nutaro coded the Game of Life
  1412. CA in his \verb|a-DEVS-0.2| \cite{Nutaro:aDEVS} parallel DEVS
  1413. implementation.
  1414. \section{Conclusions}
  1415. Since the end of 1993, the European Commission's ESPRIT Basic
  1416. Research Working Group 8467 (SiE) \cite{SiE:simulation},
  1417. has identified key areas for future research in modelling and simulation.
  1418. The most urgent need was deemed for multi-formalism modelling, to
  1419. correctly model complex systems with components described in
  1420. diverse formalisms.
  1421. To correctly model such systems, formalisms need to be related.
  1422. The Formalism Transformation Graph (FTG) shown in Figure~\ref{fig:ftl}
  1423. depicts meaningful mappings between formalisms.
  1424. In this article, a mapping between Cellular Automata and parallel
  1425. DEVS was elaborated. This fills in the $CA \rightarrow DEVS$ edge
  1426. in the FTG. The mapping describes CA semantics in terms of parallel
  1427. DEVS semantics and is a basis for automated transformation of CA
  1428. models into parallel DEVS models, thus enabling efficient, parallel
  1429. simulation as well as meaningful coupling with models in other
  1430. formalisms.
  1431. % \vfill
  1432. % \pagebreak
  1433. % From here, should be 2 column wide
  1434. % \begin{minipage}{\textwidth}
  1435. \bibliography{bib/abbrev,bib/simulation,bib/math,bib/hv.journals}
  1436. % \bibliographystyle{alpha}
  1437. \bibliographystyle{plain}
  1438. % \end{minipage}
  1439. \end{document}