summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAndrew Butterfield <Andrew.Butterfield@scss.tcd.ie>2023-01-13 15:37:36 +0000
committerSebastian Huber <sebastian.huber@embedded-brains.de>2023-01-18 12:42:32 +0100
commit5d08ea3aa8cbe12683f126d32805bba82e4340c5 (patch)
tree4ce60ec7d92bf3c4e944a57639f137fcd2d367ce
parentforked from https://github.com/quic/comment-filter/commits/master (diff)
downloadrtems-central-5d08ea3aa8cbe12683f126d32805bba82e4340c5.tar.bz2
forked from https://github.com/johnyf/promela,
commit 32d14184a50e920a92201058e4f601329be8c9c7
-rw-r--r--formal/promela/src/src/modules/promela_yacc/.gitignore20
-rw-r--r--formal/promela/src/src/modules/promela_yacc/LICENSE31
-rw-r--r--formal/promela/src/src/modules/promela_yacc/MANIFEST.in4
-rw-r--r--formal/promela/src/src/modules/promela_yacc/README.md27
-rw-r--r--formal/promela/src/src/modules/promela_yacc/doc.md100
-rw-r--r--formal/promela/src/src/modules/promela_yacc/promela/__init__.py6
-rw-r--r--formal/promela/src/src/modules/promela_yacc/promela/ast.py1035
-rw-r--r--formal/promela/src/src/modules/promela_yacc/promela/lex.py211
-rw-r--r--formal/promela/src/src/modules/promela_yacc/promela/yacc.py955
-rw-r--r--formal/promela/src/src/modules/promela_yacc/setup.py65
-rw-r--r--formal/promela/src/src/modules/promela_yacc/tests/yacc_test.py759
11 files changed, 3213 insertions, 0 deletions
diff --git a/formal/promela/src/src/modules/promela_yacc/.gitignore b/formal/promela/src/src/modules/promela_yacc/.gitignore
new file mode 100644
index 00000000..f4cd3f85
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/.gitignore
@@ -0,0 +1,20 @@
+build/*
+dist/*
+promela/_version.py
+*parsetab.py
+.coverage
+.DS_Store
+Icon?
+
+*.*~
+__pycache__
+*.pyc
+*.swp
+*.pdf
+*.PDF
+*.txt
+*.log
+*.egg-info
+*.html
+*.css
+
diff --git a/formal/promela/src/src/modules/promela_yacc/LICENSE b/formal/promela/src/src/modules/promela_yacc/LICENSE
new file mode 100644
index 00000000..bebe3694
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/LICENSE
@@ -0,0 +1,31 @@
+Copyright (c) 2014-2018 by California Institute of Technology
+Copyright (c) 2014-2018 by Ioannis Filippidis
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the California Institute of Technology nor
+ the names of its contributors may be used to endorse or promote
+ products derived from this software without specific prior
+ written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH OR THE
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/formal/promela/src/src/modules/promela_yacc/MANIFEST.in b/formal/promela/src/src/modules/promela_yacc/MANIFEST.in
new file mode 100644
index 00000000..bbacf5bf
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/MANIFEST.in
@@ -0,0 +1,4 @@
+include tests/*.py
+include README.md
+include LICENSE
+include doc.md
diff --git a/formal/promela/src/src/modules/promela_yacc/README.md b/formal/promela/src/src/modules/promela_yacc/README.md
new file mode 100644
index 00000000..5340e23b
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/README.md
@@ -0,0 +1,27 @@
+[![Build Status][build_img]][travis]
+[![Coverage Status][coverage]][coveralls]
+
+
+About
+=====
+
+A parser for the Promela modeling language.
+[PLY](https://pypi.python.org/pypi/ply/3.4) (Python `lex`-`yacc`) is used to generate the parser.
+Classes for a Promela abstract tree are included and used for representing the result of parsing.
+A short tutorial can be found in the file `doc.md`.
+To install:
+
+```
+pip install promela
+```
+
+
+License
+=======
+[BSD-3](http://opensource.org/licenses/BSD-3-Clause), see `LICENSE` file.
+
+
+[build_img]: https://travis-ci.org/johnyf/promela.svg?branch=master
+[travis]: https://travis-ci.org/johnyf/promela
+[coverage]: https://coveralls.io/repos/johnyf/promela/badge.svg?branch=master
+[coveralls]: https://coveralls.io/r/johnyf/promela?branch=master \ No newline at end of file
diff --git a/formal/promela/src/src/modules/promela_yacc/doc.md b/formal/promela/src/src/modules/promela_yacc/doc.md
new file mode 100644
index 00000000..02d73431
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/doc.md
@@ -0,0 +1,100 @@
+This package provides a lexer, parser, and abstract syntax tree (AST) for the [Promela](http://en.wikipedia.org/wiki/Promela) modeling language.
+The lexer and parser are generated using [PLY](https://pypi.python.org/pypi/ply/3.4) (Python `lex`-`yacc`).
+The [grammar](http://spinroot.com/spin/Man/grammar.html) is based on that used in the [SPIN](http://spinroot.com/spin/whatispin.html) model checker (in the files `spin.y` and `spinlex.c` of SPIN's source distribution), with modifications where needed.
+
+To instantiate a Promela parser:
+
+```python
+from promela.yacc import Parser
+parser = Parser()
+```
+
+Then Promela code, as a string, can be parsed by:
+
+```python
+s = '''
+active proctype foo(){
+ int x;
+ do
+ :: x = x + 1;
+ od
+}
+'''
+program = parser.parse(s)
+```
+
+then
+
+```python
+>>> print(program)
+active [1] proctype foo(){
+ int x
+ do
+ :: x = (x + 1)
+ od;
+}
+```
+
+The parser returns the result as an abstract syntax tree using classes from the `promela.ast` module.
+The top production rule returns a `Program` instance, which itself is a `list`.
+The contents of this list are the top-level units parsed from the input: global definitions, proctypes, and `ltl` blocks (see `Program.to_table`).
+
+There are two categories of AST classes: those that represent control flow constructs:
+
+- `Proctype`, (`Init`, `NeverClaim`), `Node`, (`Expression`, `Assignment`, `Assert`, `Options` (if, do), `Else`, `Break`, `Goto`, `Label`, `Call`, `Return`, `Run`), `Sequence`
+
+and those that represent only syntax inside an expression:
+
+- `Terminal`, (`VarRef`, `Integer`, `Bool`), `Operator`, (`Binary`, `Unary`)
+
+The classes in parentheses are subclasses of the last class preceding the parentheses.
+Each control flow class has a method `to_pg` that recursively converts the abstract syntax tree to a program graph.
+
+A program graph is a directed graph whose edges are labeled with statements from the program.
+Nodes represent states of the program.
+Note the difference with a control flow graph, whose nodes are program statements and edges are program states.
+AST node classes correspond to nodes of the control flow graph and edges of the program graph (possibly with branching).
+
+For some node classes like `Expression` and `Assignment`, the `to_pg` method returns themselves, as labels of edges in the program graph.
+Almost all statements are represented as either an `Expression` or an `Assignment`.
+These label edges in the program graph, using the edge attribute `"stmt"`.
+
+The program graph is represented as a [multi-digraph](http://en.wikipedia.org/wiki/Multigraph) using [`networkx.MultiDiGraph`](https://networkx.github.io/documentation/latest/reference/classes.multidigraph.html).
+A multi-digraph is necessary, because two nodes in the program graph may be connected by two edges, each edge labeled with a different statement.
+For example, this is the case in the code fragment:
+
+```promela
+bit x;
+do
+:: x == 0
+:: x == 1
+od
+```
+
+The above defines a program graph with a single node and two self-loops, one labeled with the statement `x == 0` and another with the statement `x == 1`.
+These two statements here are guards, so they only determine whether the edge can be traversed, without affecting the program's data state (variable values).
+
+Program graph nodes are labeled with a `"context"` attribute that can take the values:
+- `"atomic"`
+- `"d_step"`
+- `None`
+The values `"atomic"` and `"d_step"` signify that the state is inside an atomic or deterministic step block.
+
+Continuing our earlier example:
+
+```python
+>>> g = program[0].to_pg()
+>>> g.nodes(data=True)
+[(0, {'context': None}), (1, {'context': None})]
+>>> g.edges(data=True)
+[(0, 1, {'stmt': Assignment(
+ VarRef('x', None, None),
+ Expression(
+ Operator('+',
+ VarRef('x', None, None),
+ Integer('1'))))}),
+ (1, 0, {'stmt': Expression(
+ Operator('==',
+ VarRef('x', None, None),
+ Integer('2')))})]
+```
diff --git a/formal/promela/src/src/modules/promela_yacc/promela/__init__.py b/formal/promela/src/src/modules/promela_yacc/promela/__init__.py
new file mode 100644
index 00000000..5cb6d2c4
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/promela/__init__.py
@@ -0,0 +1,6 @@
+"""Promela parser and syntax tree."""
+from .yacc import Parser
+try:
+ from ._version import version as __version__
+except:
+ __version__ = None
diff --git a/formal/promela/src/src/modules/promela_yacc/promela/ast.py b/formal/promela/src/src/modules/promela_yacc/promela/ast.py
new file mode 100644
index 00000000..16baaa05
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/promela/ast.py
@@ -0,0 +1,1035 @@
+"""Abstract syntax tree for Promela."""
+from __future__ import absolute_import
+from __future__ import division
+import logging
+import copy
+import ctypes
+import pprint
+import networkx as nx
+from networkx.utils import misc
+
+
+logger = logging.getLogger(__name__)
+DATATYPES = {
+ 'bit': ctypes.c_bool,
+ 'bool': ctypes.c_bool,
+ 'byte': ctypes.c_ubyte,
+ 'pid': ctypes.c_ubyte,
+ 'short': ctypes.c_short,
+ 'int': ctypes.c_int,
+ 'unsigned': None,
+ 'chan': None,
+ 'mtype': None}
+N = 0
+
+
+def generate_unique_node():
+ """Return a fresh integer to be used as node."""
+ global N
+ N += 1
+ return N
+
+
+def _indent(s, depth=1, skip=0):
+ w = []
+ for i, line in enumerate(s.splitlines()):
+ indent = '' if i < skip else depth * '\t'
+ w.append(indent + line)
+ return '\n'.join(w)
+
+
+def to_str(x):
+ try:
+ return x.to_str()
+ except AttributeError:
+ return str(x)
+
+
+class Proctype(object):
+ def __init__(self, name, body, args=None,
+ active=None, d_proc=False,
+ priority=None, provided=None):
+ self.name = name
+ self.body = body
+ self.args = args
+ if active is None:
+ active = 0
+ else:
+ active = int(active.value)
+ if priority is not None:
+ priority = int(priority.value)
+ self.active = active
+ self.priority = priority
+ self.provided = provided
+
+ def __str__(self):
+ return "Proctype('{name}')".format(name=self.name)
+
+ def to_str(self):
+ return (
+ '{active} proctype {name}({args}){{\n'
+ '{body}\n'
+ '}}\n\n').format(
+ active=self._active_str(),
+ name=self.name,
+ args=self._args_str(),
+ body=_indent(to_str(self.body)))
+
+ def _active_str(self):
+ if self.active is None:
+ active = ''
+ else:
+ active = 'active [' + to_str(self.active) + '] '
+ return active
+
+ def _args_str(self):
+ if self.args is None:
+ args = ''
+ else:
+ args = to_str(self.args)
+ return args
+
+ def to_pg(self, syntactic_else=False):
+ """Return program graph of proctype.
+
+ @param syntactic_else: if `True`, then "else"
+ statements in directly nested "if" or "do"
+ take precedence based on syntactic context.
+ The Promela language reference defines
+ "else" semantically, with respect to
+ the program graph.
+ """
+ global N
+ N = 1
+ g = nx.MultiDiGraph(name=self.name)
+ g.locals = set()
+ ine, out = self.body.to_pg(g, syntactic_else=syntactic_else)
+ # root: explicit is better than implicit
+ u = generate_unique_node()
+ g.add_node(u, color='red', context=None)
+ g.root = u
+ for v, d in ine:
+ g.add_edge(u, v, **d)
+ # rm unreachable nodes
+ S = nx.descendants(g, g.root)
+ S.add(g.root)
+ nodes_to_rm = {x for x in g.nodes() if x not in S}
+ [g.remove_node(x) for x in nodes_to_rm]
+ if logger.getEffectiveLevel() == 1:
+ dump_graph(
+ g, 'dbg.pdf', node_label='context',
+ edge_label='stmt', relabel=True)
+ # contract goto edges
+ assert_gotos_are_admissible(g)
+ for u in sorted(g.nodes(), key=str):
+ contract_goto_edges(g, u)
+ h = map_uuid_to_int(g)
+ # other out-edges of each node with an `else`
+ if not syntactic_else:
+ semantic_else(h)
+ return h
+
+
+def contract_goto_edges(g, u):
+ """Identify nodes connected with `goto` edge."""
+ assert u in g
+ assert g.root in g
+ n = g.out_degree(u)
+ if n == 0 or 1 < n:
+ return
+ assert n == 1, n
+ # single outgoing edge: safe to contract
+ _, q, d = next(iter(g.edges(u, data=True)))
+ if d['stmt'] != 'goto':
+ return
+ # goto
+ assert u != q, 'loop of `goto`s detected'
+ # the source node context (atomic or d_step) is overwritten
+ for p, _, d in g.in_edges(u, data=True):
+ g.add_edge(p, q, **d)
+ # but the source node label is preserved
+ u_label = g.node[u].get('labels')
+ if u_label is not None:
+ g.node[q].setdefault('labels', set()).update(u_label)
+ g.remove_node(u)
+ if u == g.root:
+ g.root = q
+
+
+def assert_gotos_are_admissible(g):
+ """Assert no branch node has outgoing `goto`."""
+ # branch node cannot have goto
+ # `goto` and `break` must have transformed to `true`
+ # labels must have raised `Exception`
+ for u in g:
+ if g.out_degree(u) <= 1:
+ continue
+ for _, v, d in g.edges(u, data=True):
+ assert 'stmt' in d
+ stmt = d['stmt']
+ assert stmt != 'goto', stmt
+ for u, d in g.nodes(data=True):
+ assert 'context' in d
+ for u, v, d in g.edges(data=True):
+ assert 'stmt' in d
+
+
+def map_uuid_to_int(g):
+    """Replace uuid nodes with integers."""
+ umap = {u: i for i, u in enumerate(sorted(g, key=str))}
+ h = nx.MultiDiGraph(name=g.name)
+ for u, d in g.nodes(data=True):
+ p = umap[u]
+ h.add_node(p, **d)
+ for u, v, key, d in g.edges(keys=True, data=True):
+ p = umap[u]
+ q = umap[v]
+ h.add_edge(p, q, key=key, **d)
+ h.root = umap[g.root]
+ h.locals = g.locals
+ return h
+
+
+def semantic_else(g):
+ """Set `Else.other_guards` to other edges with same source."""
+ for u, v, d in g.edges(data=True):
+ stmt = d['stmt']
+ if not isinstance(stmt, Else):
+ continue
+ # is `Else`
+ stmt.other_guards = [
+ q['stmt'] for _, _, q in g.out_edges(u, data=True)
+ if q['stmt'] != stmt]
+
+
+class NeverClaim(Proctype):
+ """Subclass exists only for semantic purposes."""
+ def to_str(self):
+ name = '' if self.name is None else self.name
+ s = (
+ 'never ' + name + '{\n' +
+ _indent(to_str(self.body)) + '\n'
+ '}\n\n')
+ return s
+
+
+class Init(Proctype):
+ def to_str(self):
+ return (
+ 'init ' + '{\n' +
+ _indent(to_str(self.body)) + '\n'
+ '}\n\n')
+
+
+class Program(list):
+ def __str__(self):
+ return '\n'.join(to_str(x) for x in self)
+
+ def __repr__(self):
+ c = super(Program, self).__repr__()
+ return 'Program({c})'.format(c=c)
+
+ def to_table(self):
+ """Return global definitions, proctypes, and LTL blocks.
+
+ @rtype: 3-`tuple` of `set`
+ """
+ units = misc.flatten(self)
+ ltl = {x for x in units if isinstance(x, LTL)}
+ proctypes = {x for x in units if isinstance(x, Proctype)}
+ global_defs = {x for x in units
+ if x not in proctypes and x not in ltl}
+ return global_defs, proctypes, ltl
+
+
+class LTL(object):
+ """Used to mark strings as LTL blocks."""
+
+ def __init__(self, formula):
+ self.formula = formula
+
+ def __repr__(self):
+ return 'LTL({f})'.format(f=repr(self.formula))
+
+ def __str__(self):
+ return 'ltl {' + str(self.formula) + '}'
+
+
+class Sequence(list):
+ def __init__(self, iterable, context=None, is_option=False):
+ super(Sequence, self).__init__(iterable)
+        # "atomic" or "d_step"
+ self.context = context
+ self.is_option = is_option
+
+ def to_str(self):
+ if self.context is None:
+ return '\n'.join(to_str(x) for x in self)
+ else:
+ return (
+ self.context + '{\n' +
+ _indent(to_str(self)) + '\n}\n')
+
+ def __repr__(self):
+ l = super(Sequence, self).__repr__()
+ return 'Sequence({l}, context={c}, is_option={isopt})'.format(
+ l=l, c=self.context, isopt=self.is_option)
+
+ def to_pg(self, g, context=None, option_guard=None, **kw):
+ # set context
+ if context is None:
+ context = self.context
+ c = context
+ assert c in {'atomic', 'd_step', None}
+ # atomic cannot appear inside d_step
+ if context == 'd_step' and c == 'atomic':
+ raise Exception('atomic inside d_step')
+ context = c
+ # find first non-decl
+ # option guard first
+ option_guard = self.is_option or option_guard
+ assert len(self) > 0
+ stmts = iter(self)
+ t = None
+ for stmt in stmts:
+ t = stmt.to_pg(g, context=context,
+ option_guard=option_guard, **kw)
+ if t is not None:
+ break
+ # no statements ?
+ if t is None:
+ return None
+ e, tail = t
+ # guard can't be a goto or label
+ # (should have been caught below)
+ if option_guard:
+ for u, d in e:
+ assert d.get('stmt') != 'goto', self
+ # other option statements
+ for stmt in stmts:
+ t = stmt.to_pg(g, context=context, option_guard=None, **kw)
+ # decl ?
+ if t is None:
+ continue
+ ine, out = t
+ # connect tail to ine
+ assert ine
+ for v, d in ine:
+ g.add_edge(tail, v, **d)
+ # update tail
+ assert out in g
+ tail = out
+ return e, tail
+
+
+class Node(object):
+ def to_pg(self, g, context=None, **kw):
+ u = generate_unique_node()
+ g.add_node(u, context=context)
+ e = (u, dict(stmt=self))
+ return [e], u
+
+
+class Options(Node):
+ def __init__(self, opt_type, options):
+ self.type = opt_type
+ self.options = options
+
+ def to_str(self):
+ a, b = self.entry_exit
+ c = list()
+ c.append(a)
+ c.append('\n')
+ for option in self.options:
+ option_guard = _indent(to_str(option[0]), skip=1)
+ w = [_indent(to_str(x)) for x in option[1:]]
+ c.append(
+ ':: {option_guard}{tail}\n'.format(
+ option_guard=option_guard,
+ tail=(' ->\n' + '\n'.join(w)) if w else ''))
+ c.append(b)
+ c.append(';\n')
+ return ''.join(c)
+
+ @property
+ def entry_exit(self):
+ if self.type == 'if':
+ return ('if', 'fi')
+ elif self.type == 'do':
+ return ('do', 'od')
+
+ def to_pg(self, g, od_exit=None,
+ context=None, option_guard=None,
+ syntactic_else=False, **kw):
+ logger.info('-- start flattening {t}'.format(t=self.type))
+ assert self.options
+ assert self.type in {'if', 'do'}
+ # create target
+ target = generate_unique_node()
+ g.add_node(target, context=context)
+ # target != exit node ?
+ if self.type == 'do':
+ od_exit = generate_unique_node()
+ g.add_node(od_exit, context=context)
+ self_else = None
+ self_has_else = False
+ option_has_else = False
+ edges = list()
+ else_ine = None
+ for option in self.options:
+ logger.debug('option: {opt}'.format(opt=option))
+ t = option.to_pg(g, od_exit=od_exit, context=context, **kw)
+ assert t is not None # decls filtered by `Sequence`
+ ine, out = t
+ assert out in g
+ # detect `else`
+ has_else = False
+ for u, d in ine:
+ stmt = d.get('stmt')
+ if isinstance(stmt, Else):
+ has_else = True
+ self_else = stmt
+ # option cannot start with goto (= contraction)
+ assert stmt != 'goto'
+ # who owns this else ?
+ if has_else:
+ if len(ine) == 1:
+ assert not self_has_else, option
+ self_has_else = True
+ if not syntactic_else:
+ assert not option_has_else, option
+ elif len(ine) > 1:
+ option_has_else = True
+ if not syntactic_else:
+ assert not self_has_else, option
+ else:
+ raise Exception('option with no in edges')
+ # collect in edges, except for own `else`
+ if not (has_else and self_has_else):
+ edges.extend(ine)
+ else:
+ else_ine = ine # keep for later
+ # forward edges
+ # goto from last option node to target node
+ g.add_edge(out, target, stmt='goto')
+ # backward edges
+ if self.type == 'if':
+ continue
+ for u, d in ine:
+ g.add_edge(target, u, **d)
+ # handle else
+ if self_has_else and not option_has_else:
+ self_else.other_guards = [d['stmt'] for v, d in edges]
+ # add back the `else` edge
+ edges.extend(else_ine)
+ # what is the exit node ?
+ if self.type == 'if':
+ out = target
+ elif self.type == 'do':
+ out = od_exit
+ else:
+ raise Exception('Unknown type: {t}'.format(t=out))
+ # is not itself an option guard ?
+ if option_guard:
+ logger.debug('is option guard')
+ if self.type == 'do' and option_guard is None:
+ edge = (target, dict(stmt='goto'))
+ in_edges = [edge]
+ else:
+ in_edges = edges
+ logger.debug('in edges: {ie}, out: {out}\n'.format(
+ ie=in_edges, out=out))
+ logger.info('-- end flattening {t}'.format(t=self.type))
+ assert out in g
+ return in_edges, out
+
+
+class Else(Node):
+ def __init__(self):
+ self.other_guards = None
+
+ def __str__(self):
+ return 'else'
+
+
+class Break(Node):
+ def __str__(self):
+ return 'break'
+
+ def to_pg(self, g, od_exit=None, **kw):
+ if od_exit is None:
+ raise Exception('Break outside repetition construct.')
+ # like goto, but with: v = od_exit
+ # context of od_tail is determined by do loop
+ assert od_exit in g
+ v = od_exit
+ return goto_to_pg(g, v, **kw)
+
+
+class Goto(Node):
+ def __init__(self, label):
+ self.label = label
+
+ def __str__(self):
+ return 'goto {l}'.format(l=self.label)
+
+ def to_pg(self, g, context=None, **kw):
+ v = _format_label(self.label)
+ # ok, because source node context
+ # is overwritten during contraction
+ g.add_node(v, context=context)
+ return goto_to_pg(g, v, context=context, **kw)
+
+
+def goto_to_pg(g, v, option_guard=None, context=None, **kw):
+ assert v in g
+ if option_guard is None:
+ stmt = 'goto'
+ else:
+ stmt = Bool('true')
+ e = (v, dict(stmt=stmt))
+ u = generate_unique_node()
+ g.add_node(u, context=context)
+ return [e], u
+
+
+class Label(Node):
+ def __init__(self, label, body):
+ self.label = label
+ self.body = body
+
+ def to_str(self):
+ return '{l}: {b}'.format(l=self.label, b=to_str(self.body))
+
+ def to_pg(self, g, option_guard=None, context=None, **kw):
+ if option_guard is not None:
+ raise Exception('option guard cannot be labeled')
+ # add label node, with context
+ u = _format_label(self.label)
+ g.add_node(u, context=context, labels={self.label})
+ # flatten body
+ t = self.body.to_pg(g, context=context, **kw)
+ if t is None:
+ raise Exception('Label of variable declaration.')
+ ine, out = t
+ assert out in g
+ # make ine out edges of label node
+ for v, d in ine:
+ g.add_edge(u, v, **d)
+ # appear like a goto (almost)
+ e = (u, dict(stmt='goto'))
+ return [e], out
+
+
+def _format_label(label):
+ return 'label_{l}'.format(l=label)
+
+
+# TODO: check that referenced types exist, before adding typedef
+# to symbol table
+
+class VarDef(Node):
+ def __init__(self, name, vartype, length=None,
+ visible=None, bitwidth=None,
+ msg_types=None, initval=None):
+ self.name = name
+ self.type = vartype
+ if length is None:
+ l = None
+ else:
+ l = eval(str(length))
+ assert isinstance(l, int), l
+ self.length = l
+ self.visible = visible
+ if bitwidth is not None:
+ self.bitwidth = int(bitwidth.value)
+ if vartype == 'bool':
+ default_initval = Bool('false')
+ else:
+ default_initval = Integer('0')
+ if initval is None:
+ initval = Expression(default_initval)
+ self.initial_value = initval
+ # TODO message types
+
+ def __repr__(self):
+ return 'VarDef({t}, {v})'.format(t=self.type, v=self.name)
+
+ def to_str(self):
+ s = '{type} {varname}{len}'.format(
+ type=self._type_str(),
+ varname=self.name,
+ len='[{n}]'.format(n=self.len) if self.len else '')
+ return s
+
+ def _type_str(self):
+ return self.type
+
+ def to_pg(self, g, **kw):
+ # var declarations are collected before the process runs
+ # man page: datatypes, p.405
+ g.locals.add(self)
+ return None
+
+ def insert(self, symbol_table, pid):
+ """Insert variable into table of symbols.
+
+ @type symbol_table: L{SymbolTable}
+
+ @type pid: int or None
+ """
+ t = self.type
+ if t == 'chan':
+ v = MessageChannel(self.len)
+ # channels are always available globally
+ # note how this differs from having global scope:
+ # no name conflicts
+ symbol_table.channels.add(v)
+ elif t == 'mtype':
+ raise NotImplementedError
+ elif t in {'bit', 'bool', 'byte', 'pid', 'short', 'int'}:
+ if self.len is None:
+ v = DATATYPES[t]()
+ else:
+ v = [DATATYPES[t]() for i in range(self.len)]
+ elif t == 'unsigned':
+ n = self.bitwidth
+
+ class Unsigned(ctypes.Structure):
+ _fields_ = [('value', ctypes.c_uint, n)]
+
+ if self.len is None:
+ v = Unsigned()
+ else:
+ v = [Unsigned() for i in range(self.len)]
+ else:
+ raise TypeError('unknown type "{t}"'.format(t=t))
+ # global scope ?
+ if pid is None:
+ d = symbol_table.globals
+ else:
+ d = symbol_table.locals[pid]
+ name = self.name
+ if name in d:
+ raise Exception('variable "{name}" is already defined'.format(
+ name=name))
+ else:
+ d[name] = v
+
+
+class SymbolTable(object):
+ """Variable, user data and message type definitions.
+
+ Attributes:
+
+ - `globals`: `dict` of global vars
+ - `locals`: `dict` of `dicts`, keys of outer `dict` are pids
+ - `channels`: `dict` of global lists for channels
+ - `pids`: map from:
+
+ pid integers
+
+ to:
+
+ - name of proctype (name)
+ - current value of program counter (pc)
+
+ - `types`: `dict` of data types
+
+ pids are non-negative integers.
+ The type name "mtype" corresponds to a message type.
+ """
+
+ def __init__(self):
+ # see Def. 7.6, p.157
+ self.exclusive = None
+ self.handshake = None
+ self.timeout = False
+ self.else_ = False
+ self.stutter = False
+ # tables of variables
+ self.globals = dict()
+ self.channels = set()
+ self.locals = dict()
+ self.pids = dict()
+ self.types = DATATYPES
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def __eq__(self, other):
+ assert(isinstance(other, SymbolTable))
+ if self.globals != other.globals:
+ return False
+ if self.channels != other.channels:
+ return False
+ if set(self.locals) != set(other.locals):
+ return False
+ for pid, d in self.locals.items():
+ if d != other.locals[pid]:
+ return False
+ if set(self.pids) != set(other.pids):
+ return False
+ for pid, d in self.pids.items():
+ q = other.pids[pid]
+ if d['name'] != q['name']:
+ return False
+ if d['pc'] != q['pc']:
+ return False
+ return True
+
+ def __str__(self):
+ return (
+ 'globals: {g}\n'
+ 'channels: {c}\n'
+ 'pids: {p}\n\n'
+ 'types: {t}\n'
+ 'locals: {l}\n'
+ 'exclusive: {e}\n').format(
+ g=self.globals,
+ l=self.locals,
+ p=pprint.pformat(self.pids, width=15),
+ t=self.types,
+ e=self.exclusive,
+ c=self.channels)
+
+ def copy(self):
+ new = SymbolTable()
+ # auxiliary
+ new.exclusive = self.exclusive
+ new.handshake = self.handshake
+ new.timeout = self.timeout
+ new.else_ = self.else_
+ new.stutter = self.stutter
+ # copy symbols
+ new.globals = copy.deepcopy(self.globals)
+ new.channels = copy.deepcopy(self.channels)
+ new.locals = copy.deepcopy(self.locals)
+ new.pids = {k: {'name': d['name'],
+ 'pc': d['pc']}
+ for k, d in self.pids.items()}
+ new.types = self.types
+ return new
+
+
+class MessageChannel(object):
+ def __init__(self, nslots):
+ self.nslots = nslots
+ self.contents = list()
+
+ def send(self, x):
+ if len(self.contents) < self.nslots:
+ self.contents.append(x)
+ else:
+ raise Exception('channel {name} is full'.format(
+ name=self.name))
+
+ def receive(self, x=None, random=False, rm=True):
+ c = self.contents
+ i = 0
+ if x and random:
+ i = c.index(x)
+ m = c[i]
+ if rm:
+ c.pop(i)
+ return m
+
+
+class TypeDef(Node):
+ def __init__(self, name, decls):
+ self.name = name
+ self.decls = decls
+
+ def __str__(self):
+ return 'typedef {name} {\ndecls\n}'.format(
+ name=self.name, decls=to_str(self.decls))
+
+ def exe(self, t):
+ t.types[self.name] = self
+
+
+class MessageType(Node):
+ def __init__(self, values, visible=None):
+ self.values = values
+
+ def __str__(self):
+ return 'mtype {{ {values} }}'.format(values=self.values)
+
+ def exe(self, t):
+ t.types[self.name] = self
+
+
+class Return(Node):
+ def __init__(self, expr):
+ self.expr = expr
+
+ def __str__(self):
+ return to_str(self.expr)
+
+
+class Run(Node):
+ def __init__(self, func, args=None, priority=None):
+ self.func = func
+ self.args = args
+ self.priority = priority
+
+ def __str__(self):
+ return 'run({f})'.format(f=self.func)
+
+
+class Inline(Node):
+ def __init__(self, name, args):
+ self.name = name
+ self.args = args
+
+
+class Call(Node):
+ def __init__(self, func, args):
+ self.func = func
+ self.args = args
+
+
+class Assert(Node):
+ def __init__(self, expr):
+ self.expr = expr
+
+ def __repr__(self):
+ return 'assert({expr})'.format(expr=repr(self.expr))
+
+
+class Expression(Node):
+ def __init__(self, expr):
+ self.expr = expr
+
+ def __repr__(self):
+ return 'Expression({expr})'.format(expr=repr(self.expr))
+
+ def __str__(self):
+ return to_str(self.expr)
+
+ def eval(self, g, l):
+ s = str(self)
+ g = dict(g)
+ for k, v in g.items():
+ if 'ctypes' in str(type(v)):
+ g[k] = int(v.value)
+ elif isinstance(v, list):
+ for x in v:
+ if 'ctypes' in str(type(x)):
+ v[v.index(x)] = int(x.value)
+ l = dict(l)
+ for k, v in l.items():
+ if 'ctypes' in str(type(v)):
+ l[k] = int(v.value)
+ elif isinstance(v, list):
+ for x in v:
+ if 'ctypes' in str(type(x)):
+ v[v.index(x)] = int(x.value)
+
+ v = eval(s, g, l)
+ return v
+
+
+class Assignment(Node):
+ def __init__(self, var, value):
+ self.var = var
+ self.value = value
+
+ def __repr__(self):
+ return 'Assignment({var}, {val})'.format(
+ var=repr(self.var), val=repr(self.value))
+
+ def __str__(self):
+ return '{var} = {val}'.format(var=self.var, val=self.value)
+
+ def exe(self, g, l):
+ logger.debug('Assign: {var} = {val}'.format(
+ var=self.var, val=self.value))
+ s = self.to_str()
+ og = g
+ ol = l
+ g = dict(g)
+ for k, v in g.items():
+ if 'ctypes' in str(type(v)):
+ g[k] = int(v.value)
+ elif isinstance(v, list):
+ for x in v:
+ if 'ctypes' in str(type(x)):
+ v[v.index(x)] = int(x.value)
+ l = dict(l)
+ for k, v in l.items():
+ if 'ctypes' in str(type(v)):
+ l[k] = int(v.value)
+ elif isinstance(v, list):
+ for x in v:
+ if 'ctypes' in str(type(x)):
+ v[v.index(x)] = int(x.value)
+ exec(s, g, l)
+ for k in og:
+ og[k] = g[k]
+ for k in ol:
+ ol[k] = l[k]
+
+
+class Receive(Node):
+ def __init__(self, varref, args=None):
+ self.var = varref
+ self.args = args
+
+ def __str__(self):
+ v = to_str(self.var)
+ return 'Rx({v})'.format(v=v)
+
+
+class Send(Node):
+ def __init__(self, varref, args=None):
+ self.varref = varref
+ self.args = args
+
+ def __str__(self):
+ v = to_str(self.var)
+ return 'Tx({v})'.format(v=v)
+
+
+class Printf(Node):
+ def __init__(self, s, args):
+ self.s = s
+ self.args = args
+
+ def __str__(self):
+ return 'printf()'.format(s=self.s, args=self.args)
+
+
+class Operator(object):
+ def __init__(self, operator, *operands):
+ self.operator = operator
+ self.operands = operands
+
+ def __repr__(self):
+ return 'Operator({op}, {xy})'.format(
+ op=repr(self.operator),
+ xy=', '.join(repr(x) for x in self.operands))
+
+ def __str__(self):
+ return '({op} {xy})'.format(
+ op=self.operator,
+ xy=' '.join(to_str(x) for x in self.operands))
+
+
+class Binary(Operator):
+ def __str__(self):
+ return '({x} {op} {y})'.format(
+ x=to_str(self.operands[0]),
+ op=self.operator,
+ y=to_str(self.operands[1]))
+
+
+class Unary(Operator):
+ pass
+
+
+class Terminal(object):
+ def __init__(self, value):
+ self.value = value
+
+ def __repr__(self):
+ return '{classname}({val})'.format(
+ classname=type(self).__name__,
+ val=repr(self.value))
+
+ def __str__(self):
+ return str(self.value)
+
+
+class VarRef(Terminal):
+ def __init__(self, name, index=None, extension=None):
+ self.name = name
+ if index is None:
+ i = None
+ else:
+ i = index
+ self.index = i
+ self.extension = extension
+ # used by some external methods
+ self.value = name
+
+ def __repr__(self):
+ return 'VarRef({name}, {index}, {ext})'.format(
+ name=repr(self.name),
+ index=repr(self.index),
+ ext=repr(self.extension))
+
+ def __str__(self):
+ if self.index is None:
+ i = ''
+ else:
+ i = '[{i}]'.format(i=to_str(self.index))
+ return '{name}{index}{ext}'.format(
+ name=self.name,
+ index=i,
+ ext='' if self.extension is None else self.extension)
+
+
+class Integer(Terminal):
+ def __bool__(self):
+ return bool(int(self.value))
+
+
+class Bool(Terminal):
+ def __init__(self, val):
+ self.value = val.upper() == 'TRUE'
+
+ def __bool__(self):
+ return self.value
+
+ def __repr__(self):
+ return 'Bool({value})'.format(value=repr(self.value))
+
+ def __str__(self):
+ return str(self.value)
+
+
+class RemoteRef(Terminal):
+ def __init__(self, proctype, label, pid=None):
+ self.proctype = proctype
+ self.label = label
+ self.pid = pid
+
+ def __repr__(self):
+ return 'RemoteRef({proc}, {label}, {pid})'.format(
+ proc=self.proctype, label=self.label, pid=self.pid)
+
+ def __str__(self):
+ if self.pid is None:
+ inst = ''
+ else:
+ inst = '[{pid}]'.format(pid=self.pid)
+ return '{proc} {inst} @ {label}'.format(
+ proc=self.proctype, inst=inst, label=self.label)
+
+
+def dump_graph(g, fname='a.pdf', node_label='label',
+ edge_label='label', relabel=False):
+ """Write the program graph, annotated with formulae, to PDF file."""
+ # map nodes to integers
+ if relabel:
+ mapping = {u: i for i, u in enumerate(g)}
+ g = nx.relabel_nodes(g, mapping)
+ inv_mapping = {v: k for k, v in mapping.items()}
+ s = list()
+ s.append('mapping of nodes:')
+ for k in sorted(inv_mapping):
+ v = inv_mapping[k]
+ s.append('{k}: {v}'.format(k=k, v=v))
+ print('\n'.join(s))
+ h = nx.MultiDiGraph()
+ for u, d in g.nodes(data=True):
+ label = d.get(node_label, u)
+ label = '"{label}"'.format(label=label)
+ h.add_node(u, label=label)
+ for u, v, d in g.edges(data=True):
+ label = d.get(edge_label, ' ')
+ label = '"{label}"'.format(label=label)
+ h.add_edge(u, v, label=label)
+ pd = nx.drawing.nx_pydot.to_pydot(h)
+ pd.write_pdf(fname)
diff --git a/formal/promela/src/src/modules/promela_yacc/promela/lex.py b/formal/promela/src/src/modules/promela_yacc/promela/lex.py
new file mode 100644
index 00000000..cd3eafe0
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/promela/lex.py
@@ -0,0 +1,211 @@
+"""Lexer for Promela, using Python Lex-Yacc (PLY)."""
+import logging
+import ply.lex
+
+
+logger = logging.getLogger(__name__)
+
+
class Lexer(object):
    """Lexer for the Promela modeling language.

    Implemented with PLY: token types come from the `t_*` string
    attributes and methods below.  An inclusive lexer state `ltl`
    is entered when the `ltl` keyword is seen, so that `->` can be
    tokenized as IMPLIES inside `ltl { ... }` blocks (elsewhere it
    is the ARROW statement separator).  Brace nesting inside an
    `ltl` block is tracked in `lexer.level`.
    """

    states = tuple([('ltl', 'inclusive')])
    # reserved words mapped to their token types
    # (note: `skip` is first rewritten to `true` via `values`,
    # so both words yield a TRUE token)
    reserved = {
        '_np': 'NONPROGRESS',
        'true': 'TRUE',
        'false': 'FALSE',
        'active': 'ACTIVE',
        'assert': 'ASSERT',
        'atomic': 'ATOMIC',
        'bit': 'BIT',
        'bool': 'BOOL',
        'break': 'BREAK',
        'byte': 'BYTE',
        'chan': 'CHAN',
        'd_step': 'D_STEP',
        'd_proctype': 'D_PROCTYPE',
        'do': 'DO',
        'else': 'ELSE',
        'empty': 'EMPTY',
        'enabled': 'ENABLED',
        'eval': 'EVAL',
        'fi': 'FI',
        'full': 'FULL',
        'get_priority': 'GET_P',
        'goto': 'GOTO',
        'hidden': 'HIDDEN',
        'if': 'IF',
        'init': 'INIT',
        'int': 'INT',
        'len': 'LEN',
        'local': 'ISLOCAL',
        'ltl': 'LTL',
        'mtype': 'MTYPE',
        'nempty': 'NEMPTY',
        'never': 'CLAIM',
        'nfull': 'NFULL',
        'od': 'OD',
        'of': 'OF',
        'pc_value': 'PC_VAL',
        'pid': 'PID',
        'printf': 'PRINT',
        'priority': 'PRIORITY',
        'proctype': 'PROCTYPE',
        'provided': 'PROVIDED',
        'R': 'RELEASE',
        'return': 'RETURN',
        'run': 'RUN',
        'short': 'SHORT',
        'skip': 'TRUE',
        'show': 'SHOW',
        'timeout': 'TIMEOUT',
        'typedef': 'TYPEDEF',
        'U': 'UNTIL',
        'unless': 'UNLESS',
        'unsigned': 'UNSIGNED',
        'X': 'NEXT',
        'xr': 'XR',
        'xs': 'XS',
        'W': 'WEAK_UNTIL'}
    # lexeme rewrites applied before the reserved-word lookup
    values = {'skip': 'true'}
    delimiters = ['LPAREN', 'RPAREN', 'LBRACKET', 'RBRACKET',
                  'LBRACE', 'RBRACE', 'COMMA', 'PERIOD',
                  'SEMI', 'COLONS', 'COLON', 'ELLIPSIS']
    # remember to check precedence
    operators = ['PLUS', 'INCR', 'MINUS', 'DECR', 'TIMES', 'DIVIDE',
                 'MOD', 'OR', 'AND', 'NOT', 'XOR', 'IMPLIES', 'EQUIV',
                 'LOR', 'LAND', 'LNOT', 'LT', 'GT',
                 'LE', 'GE', 'EQ', 'NE',
                 'RCV', 'R_RCV', 'TX2', 'LSHIFT', 'RSHIFT', 'AT',
                 'ALWAYS', 'EVENTUALLY']
    misc = ['EQUALS', 'ARROW', 'STRING', 'NAME', 'INTEGER',
            'PREPROC', 'NEWLINE', 'COMMENT']

    def __init__(self, debug=False):
        # `set` deduplicates token types that several reserved words
        # map to (e.g. both `true` and `skip` yield TRUE)
        self.tokens = (
            self.delimiters + self.operators +
            self.misc + list(set(self.reserved.values())))
        self.build(debug=debug)

    def build(self, debug=False, debuglog=None, **kwargs):
        """Create a lexer.

        @param kwargs: Same arguments as C{ply.lex.lex}:

          - except for C{module} (fixed to C{self})
          - C{debuglog} defaults to the module's logger.
        """
        if debug and debuglog is None:
            debuglog = logger
        self.lexer = ply.lex.lex(
            module=self,
            debug=debug,
            debuglog=debuglog,
            **kwargs)

    # check for reserved words
    def t_NAME(self, t):
        r'[a-zA-Z_][a-zA-Z_0-9]*'
        t.value = self.values.get(t.value, t.value)
        t.type = self.reserved.get(t.value, 'NAME')
        # switch to LTL context; `level` counts brace nesting so we
        # know when the `ltl { ... }` block ends
        if t.value == 'ltl':
            self.lexer.level = 0
            self.lexer.begin('ltl')
        return t

    def t_STRING(self, t):
        r'"[^"]*"'
        return t

    # operators
    t_PLUS = r'\+'
    t_INCR = r'\+\+'
    t_MINUS = r'-'
    t_DECR = r'--'
    t_TIMES = r'\*'
    t_DIVIDE = r'/'
    t_MOD = r'%'
    t_OR = r'\|'
    t_AND = r'&'
    t_NOT = r'~'
    t_XOR = r'\^'
    t_LOR = r'\|\|'
    t_LAND = r'&&'
    t_LNOT = r'!'
    t_TX2 = r'!!'
    t_LT = r'<'
    t_LSHIFT = r'<<'
    t_GT = r'>'
    t_RSHIFT = r'>>'
    t_LE = r'<='
    t_GE = r'>='
    t_EQ = r'=='
    t_NE = r'!='
    t_RCV = r'\?'
    t_R_RCV = r'\?\?'
    t_AT = r'@'
    t_EQUIV = r'<->'
    # assignment
    t_EQUALS = r'='
    # temporal operators
    t_ALWAYS = r'\[\]'
    t_EVENTUALLY = r'\<\>'
    # delimiters
    t_LPAREN = r'\('
    t_RPAREN = r'\)'
    t_LBRACKET = r'\['
    t_RBRACKET = r'\]'
    t_LBRACE = r'\{'
    t_RBRACE = r'\}'
    t_COMMA = r','
    t_PERIOD = r'\.'
    t_SEMI = r';'
    t_COLONS = r'::'
    t_COLON = r':'
    t_ELLIPSIS = r'\.\.\.'

    def t_ltl_LBRACE(self, t):
        r'\{'
        # one more open brace inside the `ltl` block
        self.lexer.level += 1
        return t

    def t_ltl_RBRACE(self, t):
        r'\}'
        self.lexer.level -= 1
        # closing the outermost brace leaves the `ltl` state
        if self.lexer.level == 0:
            self.lexer.begin('INITIAL')
        return t

    def t_ltl_ARROW(self, t):
        r'->'
        # inside an LTL formula, `->` is logical implication
        t.type = 'IMPLIES'
        return t

    t_INITIAL_ARROW = r'->'

    def t_PREPROC(self, t):
        r'\#.*'
        # preprocessor directives are discarded (input is run
        # through `cpp` before parsing anyway)
        pass

    def t_INTEGER(self, t):
        r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
        return t

    # t_ignore is reserved by lex to provide
    # much more efficient internal handling by lex
    #
    # A string containing ignored characters (spaces and tabs)
    t_ignore = ' \t'

    def t_NEWLINE(self, t):
        r'\n+'
        t.lexer.lineno += t.value.count('\n')

    def t_COMMENT(self, t):
        r' /\*(.|\n)*?\*/'
        # NOTE(review): the leading space in the pattern above looks
        # accidental -- comments are normally stripped by `cpp` before
        # lexing, so this rule is rarely exercised; confirm upstream.
        # BUG FIX: update the lexer's line counter (as t_NEWLINE does),
        # not the token's own `lineno` attribute -- mutating `t.lineno`
        # left `t.lexer.lineno` unchanged, so reported line numbers
        # drifted after every multi-line comment.
        t.lexer.lineno += t.value.count('\n')

    def t_error(self, t):
        logger.error('Illegal character "{s}"'.format(s=t.value[0]))
        t.lexer.skip(1)
diff --git a/formal/promela/src/src/modules/promela_yacc/promela/yacc.py b/formal/promela/src/src/modules/promela_yacc/promela/yacc.py
new file mode 100644
index 00000000..d650c063
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/promela/yacc.py
@@ -0,0 +1,955 @@
+"""Parser for Promela, using Python Lex-Yacc (PLY).
+
+
+References
+==========
+
+Holzmann G.J., The SPIN Model Checker,
+ Addison-Wesley, 2004, pp. 365--368
+ http://spinroot.com/spin/Man/Quick.html
+"""
+from __future__ import absolute_import
+from __future__ import division
+import logging
+import os
+import subprocess
+import warnings
+import ply.yacc
+# inline
+#
+# import promela.ast as promela_ast
+# from promela import lex
+
+
+TABMODULE = 'promela.promela_parsetab'
+logger = logging.getLogger(__name__)
+
+
+class Parser(object):
+ """Production rules for Promela parser."""
+
+ logger = logger
+ tabmodule = TABMODULE
+ start = 'program'
+ # http://spinroot.com/spin/Man/operators.html
+ # spin.y
+ # lowest to highest
+ precedence = (
+ ('right', 'EQUALS'),
+ ('left', 'TX2', 'RCV', 'R_RCV'),
+ ('left', 'IMPLIES', 'EQUIV'),
+ ('left', 'LOR'),
+ ('left', 'LAND'),
+ ('left', 'ALWAYS', 'EVENTUALLY'),
+ ('left', 'UNTIL', 'WEAK_UNTIL', 'RELEASE'),
+ ('right', 'NEXT'),
+ ('left', 'OR'),
+ ('left', 'XOR'),
+ ('left', 'AND'),
+ ('left', 'EQ', 'NE'),
+ ('left', 'LT', 'LE', 'GT', 'GE'),
+ ('left', 'LSHIFT', 'RSHIFT'),
+ ('left', 'PLUS', 'MINUS'),
+ ('left', 'TIMES', 'DIVIDE', 'MOD'),
+ ('left', 'INCR', 'DECR'),
+ ('right', 'LNOT', 'NOT', 'UMINUS', 'NEG'), # LNOT is also SND
+ ('left', 'DOT'),
+ ('left', 'LPAREN', 'RPAREN', 'LBRACKET', 'RBRACKET'))
+
+ def __init__(self, ast=None, lexer=None):
+ if ast is None:
+ import promela.ast as ast
+ if lexer is None:
+ from promela import lex
+ lexer = lex.Lexer()
+ self.lexer = lexer
+ self.ast = ast
+ self.tokens = self.lexer.tokens
+ self.build()
+
+ def build(self, tabmodule=None, outputdir='', write_tables=False,
+ debug=False, debuglog=None, errorlog=None):
+ """Build parser using `ply.yacc`.
+
+ Default table module is `self.tabmodule`.
+ Module logger used as default debug logger.
+ Default error logger is that created by PLY.
+ """
+ if tabmodule is None:
+ tabmodule = self.tabmodule
+ if debug and debuglog is None:
+ debuglog = self.logger
+ self.parser = ply.yacc.yacc(
+ method='LALR',
+ module=self,
+ start=self.start,
+ tabmodule=tabmodule,
+ outputdir=outputdir,
+ write_tables=write_tables,
+ debug=debug,
+ debuglog=debuglog,
+ errorlog=errorlog)
+
+ def parse(self, promela):
+ """Parse string of Promela code."""
+ s = cpp(promela)
+ program = self.parser.parse(
+ s, lexer=self.lexer.lexer, debug=self.logger)
+ return program
+
+ def _iter(self, p):
+ if p[2] is not None:
+ p[1].append(p[2])
+ return p[1]
+
+ def _end(self, p):
+ if p[1] is None:
+ return list()
+ else:
+ return [p[1]]
+
+ # Top-level constructs
+ # ====================
+
+ def p_program(self, p):
+ """program : units"""
+ p[0] = self.ast.Program(p[1])
+
+ def p_units_iter(self, p):
+ """units : units unit"""
+ p[0] = self._iter(p)
+
+ def p_units_end(self, p):
+ """units : unit"""
+ p[0] = self._end(p)
+
+ # TODO: events, c_fcts, ns, error
+ def p_unit_proc(self, p):
+ """unit : proc
+ | init
+ | claim
+ | ltl
+ """
+ p[0] = p[1]
+
+ def p_unit_decl(self, p):
+ """unit : one_decl
+ | utype
+ """
+ p[0] = p[1]
+
+ def p_unit_semi(self, p):
+ """unit : semi"""
+
+ def p_proc(self, p):
+ ("""proc : prefix_proctype NAME"""
+ """ LPAREN decl RPAREN"""
+ """ opt_priority opt_enabler"""
+ """ body
+ """)
+ inst = p[1]
+ name = p[2]
+ args = p[4]
+ priority = p[6]
+ enabler = p[7]
+ body = p[8]
+
+ p[0] = self.ast.Proctype(
+ name, body, args=args, priority=priority,
+ provided=enabler, **inst)
+
+ # instantiator
+ def p_inst(self, p):
+ """prefix_proctype : ACTIVE opt_index proctype"""
+ d = p[3]
+ if p[2] is None:
+ n_active = self.ast.Integer('1')
+ else:
+ n_active = p[2]
+ d['active'] = n_active
+ p[0] = d
+
+ def p_inactive_proctype(self, p):
+ """prefix_proctype : proctype"""
+ p[0] = p[1]
+
+ def p_opt_index(self, p):
+ """opt_index : LBRACKET expr RBRACKET
+ | LBRACKET NAME RBRACKET
+ """
+ p[0] = p[2]
+
+ def p_opt_index_empty(self, p):
+ """opt_index : empty"""
+
+ def p_init(self, p):
+ """init : INIT opt_priority body"""
+ p[0] = self.ast.Init(name='init', body=p[3], priority=p[2])
+
+ def p_claim(self, p):
+ """claim : CLAIM optname body"""
+ name = p[2] if p[2] else 'never'
+ p[0] = self.ast.NeverClaim(name=name, body=p[3])
+
+ # user-defined type
+ def p_utype(self, p):
+ """utype : TYPEDEF NAME LBRACE decl_lst RBRACE"""
+ seq = self.ast.Sequence(p[4])
+ p[0] = self.ast.TypeDef(p[2], seq)
+
+ def p_ltl(self, p):
+ """ltl : LTL LBRACE expr RBRACE"""
+ p[0] = self.ast.LTL(p[3])
+
+ # Declarations
+ # ============
+
+ def p_decl(self, p):
+ """decl : decl_lst"""
+ p[0] = self.ast.Sequence(p[1])
+
+ def p_decl_empty(self, p):
+ """decl : empty"""
+
+ def p_decl_lst_iter(self, p):
+ """decl_lst : one_decl SEMI decl_lst"""
+ p[0] = [p[1]] + p[3]
+
+ def p_decl_lst_end(self, p):
+ """decl_lst : one_decl"""
+ p[0] = [p[1]]
+
+ def p_one_decl_visible(self, p):
+ """one_decl : vis typename var_list
+ | vis NAME var_list
+ """
+ visible = p[1]
+ typ = p[2]
+ var_list = p[3]
+
+ p[0] = self.one_decl(typ, var_list, visible)
+
+ def p_one_decl(self, p):
+ """one_decl : typename var_list
+ | NAME var_list
+ """
+ typ = p[1]
+ var_list = p[2]
+ p[0] = self.one_decl(typ, var_list)
+
+ def one_decl(self, typ, var_list, visible=None):
+ c = list()
+ for d in var_list:
+ v = self.ast.VarDef(vartype=typ, visible=visible, **d)
+ c.append(v)
+ return self.ast.Sequence(c)
+
+ # message type declaration
+ def p_one_decl_mtype_vis(self, p):
+ """one_decl : vis MTYPE asgn LBRACE name_list RBRACE"""
+ p[0] = self.ast.MessageType(p[5], visible=p[1])
+
+ def p_one_decl_mtype(self, p):
+ """one_decl : MTYPE asgn LBRACE name_list RBRACE"""
+ p[0] = self.ast.MessageType(p[3])
+
+ def p_name_list_iter(self, p):
+ """name_list : name_list COMMA NAME"""
+ p[1].append(p[3])
+ p[0] = p[1]
+
+ def p_name_list_end(self, p):
+ """name_list : NAME"""
+ p[0] = [p[1]]
+
+ def p_var_list_iter(self, p):
+ """var_list : ivar COMMA var_list"""
+ p[0] = [p[1]] + p[3]
+
+ def p_var_list_end(self, p):
+ """var_list : ivar"""
+ p[0] = [p[1]]
+
+ # TODO: vardcl asgn LBRACE c_list RBRACE
+
+ # ivar = initialized variable
+ def p_ivar(self, p):
+ """ivar : vardcl"""
+ p[0] = p[1]
+
+ def p_ivar_asgn(self, p):
+ """ivar : vardcl asgn expr"""
+ expr = self.ast.Expression(p[3])
+ p[1]['initval'] = expr
+ p[0] = p[1]
+
+ def p_vardcl(self, p):
+ """vardcl : NAME"""
+ p[0] = {'name': p[1]}
+
+ # p.403, SPIN manual
+ def p_vardcl_unsigned(self, p):
+ """vardcl : NAME COLON const"""
+ p[0] = {'name': p[1], 'bitwidth': p[3]}
+
+ def p_vardcl_array(self, p):
+ """vardcl : NAME LBRACKET const_expr RBRACKET"""
+ p[0] = {'name': p[1], 'length': p[3]}
+
+ def p_vardcl_chan(self, p):
+ """vardcl : vardcl EQUALS ch_init"""
+ p[1].update(p[3])
+ p[0] = p[1]
+
+ def p_typename(self, p):
+ """typename : BIT
+ | BOOL
+ | BYTE
+ | CHAN
+ | INT
+ | PID
+ | SHORT
+ | UNSIGNED
+ | MTYPE
+ """
+ p[0] = p[1]
+
+ def p_ch_init(self, p):
+ ("""ch_init : LBRACKET const_expr RBRACKET """
+ """ OF LBRACE typ_list RBRACE""")
+ p[0] = {'length': p[2], 'msg_types': p[6]}
+
+ def p_typ_list_iter(self, p):
+ """typ_list : typ_list COMMA basetype"""
+ p[1].append(p[3])
+ p[0] = p[1]
+
+ def p_typ_list_end(self, p):
+ """typ_list : basetype"""
+ p[0] = [p[1]]
+
+ # TODO: | UNAME | error
+ def p_basetype(self, p):
+ """basetype : typename"""
+ p[0] = p[1]
+
+ # References
+ # ==========
+
+ def p_varref(self, p):
+ """varref : cmpnd"""
+ p[0] = p[1]
+
+ def p_cmpnd_iter(self, p):
+ """cmpnd : cmpnd PERIOD cmpnd %prec DOT"""
+ p[0] = self.ast.VarRef(extension=p[3], **p[1])
+
+ def p_cmpnd_end(self, p):
+ """cmpnd : pfld"""
+ p[0] = self.ast.VarRef(**p[1])
+
+ # pfld = prefix field
+ def p_pfld_indexed(self, p):
+ """pfld : NAME LBRACKET expr RBRACKET"""
+ p[0] = {'name': p[1], 'index': p[3]}
+
+ def p_pfld(self, p):
+ """pfld : NAME"""
+ p[0] = {'name': p[1]}
+
+ # Attributes
+ # ==========
+
+ def p_opt_priority(self, p):
+ """opt_priority : PRIORITY number"""
+ p[0] = p[2]
+
+ def p_opt_priority_empty(self, p):
+ """opt_priority : empty"""
+
+ def p_opt_enabler(self, p):
+ """opt_enabler : PROVIDED LPAREN expr RPAREN"""
+ p[0] = p[3]
+
+ def p_opt_enabler_empty(self, p):
+ """opt_enabler : empty"""
+
+ def p_body(self, p):
+ """body : LBRACE sequence os RBRACE"""
+ p[0] = p[2]
+
+ # Sequence
+ # ========
+
+ def p_sequence(self, p):
+ """sequence : sequence msemi step"""
+ p[1].append(p[3])
+ p[0] = p[1]
+
+ def p_sequence_ending_with_atomic(self, p):
+ """sequence : seq_block step"""
+ p[1].append(p[2])
+ p[0] = p[1]
+
+ def p_sequence_single(self, p):
+ """sequence : step"""
+ p[0] = self.ast.Sequence([p[1]])
+
+ def p_seq_block(self, p):
+ """seq_block : sequence msemi atomic
+ | sequence msemi dstep
+ """
+ p[1].append(p[3])
+ p[0] = p[1]
+
+ def p_seq_block_iter(self, p):
+ """seq_block : seq_block atomic
+ | seq_block dstep
+ """
+ p[1].append(p[2])
+ p[0] = p[1]
+
+ def p_seq_block_single(self, p):
+ """seq_block : atomic
+ | dstep
+ """
+ p[0] = [p[1]]
+
+ # TODO: XU vref_lst
+ def p_step_1(self, p):
+ """step : one_decl
+ | stmnt
+ """
+ p[0] = p[1]
+
+ def p_step_labeled(self, p):
+ """step : NAME COLON one_decl"""
+ raise Exception(
+ 'label preceding declaration: {s}'.format(s=p[3]))
+
+ def p_step_3(self, p):
+ """step : NAME COLON XR
+ | NAME COLON XS
+ """
+ raise Exception(
+ 'label preceding xr/xs claim')
+
+ def p_step_4(self, p):
+ """step : stmnt UNLESS stmnt"""
+ p[0] = (p[1], 'unless', p[3])
+ self.logger.warning('UNLESS not interpreted yet')
+
+ # Statement
+ # =========
+
+ def p_stmnt(self, p):
+ """stmnt : special
+ | statement
+ """
+ p[0] = p[1]
+
+ # Stmnt in spin.y
+ def p_statement_asgn(self, p):
+ """statement : varref asgn full_expr"""
+ p[0] = self.ast.Assignment(var=p[1], value=p[3])
+
+ def p_statement_incr(self, p):
+ """statement : varref INCR"""
+ one = self.ast.Integer('1')
+ expr = self.ast.Expression(self.ast.Binary('+', p[1], one))
+ p[0] = self.ast.Assignment(p[1], expr)
+
+ def p_statement_decr(self, p):
+ """statement : varref DECR"""
+ one = self.ast.Integer('1')
+ expr = self.ast.Expression(self.ast.Binary('-', p[1], one))
+ p[0] = self.ast.Assignment(p[1], expr)
+
+ def p_statement_assert(self, p):
+ """statement : ASSERT full_expr"""
+ p[0] = self.ast.Assert(p[2])
+
+ def p_statement_fifo_receive(self, p):
+ """statement : varref RCV rargs"""
+ p[0] = self.ast.Receive(p[1], p[3])
+
+ def p_statement_copy_fifo_receive(self, p):
+ """statement : varref RCV LT rargs GT"""
+ p[0] = self.ast.Receive(p[1], p[4])
+
+ def p_statement_random_receive(self, p):
+ """statement : varref R_RCV rargs"""
+ p[0] = self.ast.Receive(p[1], p[3])
+
+ def p_statement_copy_random_receive(self, p):
+ """statement : varref R_RCV LT rargs GT"""
+ p[0] = self.ast.Receive(p[1], p[4])
+
+ def p_statement_tx2(self, p):
+ """statement : varref TX2 margs"""
+ p[0] = self.ast.Send(p[1], p[3])
+
+ def p_statement_full_expr(self, p):
+ """statement : full_expr"""
+ p[0] = p[1]
+
+ def p_statement_else(self, p):
+ """statement : ELSE"""
+ p[0] = self.ast.Else()
+
+ def p_statement_atomic(self, p):
+ """statement : atomic"""
+ p[0] = p[1]
+
+ def p_atomic(self, p):
+ """atomic : ATOMIC LBRACE sequence os RBRACE"""
+ s = p[3]
+ s.context = 'atomic'
+ p[0] = s
+
+ def p_statement_dstep(self, p):
+ """statement : dstep"""
+ p[0] = p[1]
+
+ def p_dstep(self, p):
+ """dstep : D_STEP LBRACE sequence os RBRACE"""
+ s = p[3]
+ s.context = 'd_step'
+ p[0] = s
+
+ def p_statement_braces(self, p):
+ """statement : LBRACE sequence os RBRACE"""
+ p[0] = p[2]
+
+ # the stmt of line 696 in spin.y collects the inline ?
+ def p_statement_call(self, p):
+ """statement : NAME LPAREN args RPAREN"""
+ # NAME = INAME = inline
+ c = self.ast.Inline(p[1], p[3])
+ p[0] = self.ast.Sequence([c])
+
+ def p_statement_assgn_call(self, p):
+ """statement : varref asgn NAME LPAREN args RPAREN statement"""
+ inline = self.ast.Inline(p[3], p[5])
+ p[0] = self.ast.Assignment(p[1], inline)
+
+ def p_statement_return(self, p):
+ """statement : RETURN full_expr"""
+ p[0] = self.ast.Return(p[2])
+
+ def p_printf(self, p):
+ """statement : PRINT LPAREN STRING prargs RPAREN"""
+ p[0] = self.ast.Printf(p[3], p[4])
+
+ # yet unimplemented for statement:
+ # SET_P l_par two_args r_par
+ # PRINTM l_par varref r_par
+ # PRINTM l_par CONST r_par
+ # ccode
+
+ # Special
+ # =======
+
+ def p_special(self, p):
+ """special : varref RCV"""
+ p[0] = self.ast.Receive(p[1])
+
+ def p_varref_lnot(self, p):
+ """special : varref LNOT margs"""
+ raise NotImplementedError
+
+ def p_break(self, p):
+ """special : BREAK"""
+ p[0] = self.ast.Break()
+
+ def p_goto(self, p):
+ """special : GOTO NAME"""
+ p[0] = self.ast.Goto(p[2])
+
+ def p_labeled_stmt(self, p):
+ """special : NAME COLON stmnt"""
+ p[0] = self.ast.Label(p[1], p[3])
+
+ def p_labeled(self, p):
+ """special : NAME COLON"""
+ p[0] = self.ast.Label(
+ p[1],
+ self.ast.Expression(self.ast.Bool('true')))
+
+ def p_special_if(self, p):
+ """special : IF options FI"""
+ p[0] = self.ast.Options('if', p[2])
+
+ def p_special_do(self, p):
+ """special : DO options OD"""
+ p[0] = self.ast.Options('do', p[2])
+
+ def p_options_end(self, p):
+ """options : option"""
+ p[0] = [p[1]]
+
+ def p_options_iter(self, p):
+ """options : options option"""
+ p[1].append(p[2])
+ p[0] = p[1]
+
+ def p_option(self, p):
+ """option : COLONS sequence os"""
+ s = p[2]
+ s.is_option = True
+ p[0] = s
+
+ # Expressions
+ # ===========
+
+ def p_full_expr(self, p):
+ """full_expr : expr
+ | pexpr
+ """
+ p[0] = self.ast.Expression(p[1])
+
+ # probe expr = no negation allowed (positive)
+ def p_pexpr(self, p):
+ """pexpr : probe
+ | LPAREN pexpr RPAREN
+ | pexpr LAND pexpr
+ | pexpr LAND expr
+ | expr LAND pexpr
+ | pexpr LOR pexpr
+ | pexpr LOR expr
+ | expr LOR pexpr
+ """
+ p[0] = 'pexpr'
+
+ def p_probe(self, p):
+ """probe : FULL LPAREN varref RPAREN
+ | NFULL LPAREN varref RPAREN
+ | EMPTY LPAREN varref RPAREN
+ | NEMPTY LPAREN varref RPAREN
+ """
+ p[0] = 'probe'
+
+ def p_expr_paren(self, p):
+ """expr : LPAREN expr RPAREN"""
+ p[0] = p[2]
+
+ def p_expr_arithmetic(self, p):
+ """expr : expr PLUS expr
+ | expr MINUS expr
+ | expr TIMES expr
+ | expr DIVIDE expr
+ | expr MOD expr
+ """
+ p[0] = self.ast.Binary(p[2], p[1], p[3])
+
+ def p_expr_not(self, p):
+ """expr : NOT expr
+ | MINUS expr %prec UMINUS
+ | LNOT expr %prec NEG
+ """
+ p[0] = self.ast.Unary(p[1], p[2])
+
+ def p_expr_logical(self, p):
+ """expr : expr AND expr
+ | expr OR expr
+ | expr XOR expr
+ | expr LAND expr
+ | expr LOR expr
+ """
+ p[0] = self.ast.Binary(p[2], p[1], p[3])
+
+ # TODO: cexpr
+
+ def p_expr_shift(self, p):
+ """expr : expr LSHIFT expr
+ | expr RSHIFT expr
+ """
+ p[0] = p[1]
+
+ def p_expr_const_varref(self, p):
+ """expr : const
+ | varref
+ """
+ p[0] = p[1]
+
+ def p_expr_varref(self, p):
+ """expr : varref RCV LBRACKET rargs RBRACKET
+ | varref R_RCV LBRACKET rargs RBRACKET
+ """
+ p[0] = p[1]
+ warnings.warn('not implemented')
+
+ def p_expr_other(self, p):
+ """expr : LPAREN expr ARROW expr COLON expr RPAREN
+ | LEN LPAREN varref RPAREN
+ | ENABLED LPAREN expr RPAREN
+ | GET_P LPAREN expr RPAREN
+ """
+ p[0] = p[1]
+ warnings.warn('"{s}" not implemented'.format(s=p[1]))
+
+ def p_expr_run(self, p):
+ """expr : RUN aname LPAREN args RPAREN opt_priority"""
+ p[0] = self.ast.Run(p[2], p[4], p[6])
+
+ def p_expr_other_2(self, p):
+ """expr : TIMEOUT
+ | NONPROGRESS
+ | PC_VAL LPAREN expr RPAREN
+ """
+ raise NotImplementedError()
+
+ def p_expr_remote_ref_proctype_pc(self, p):
+ """expr : NAME AT NAME
+ """
+ p[0] = self.ast.RemoteRef(p[1], p[3])
+
+ def p_expr_remote_ref_pid_pc(self, p):
+ """expr : NAME LBRACKET expr RBRACKET AT NAME"""
+ p[0] = self.ast.RemoteRef(p[1], p[6], pid=p[3])
+
+ def p_expr_remote_ref_var(self, p):
+ """expr : NAME LBRACKET expr RBRACKET COLON pfld"""
+ # | NAME COLON pfld %prec DOT2
+ raise NotImplementedError()
+
+ def p_expr_comparator(self, p):
+ """expr : expr EQ expr
+ | expr NE expr
+ | expr LT expr
+ | expr LE expr
+ | expr GT expr
+ | expr GE expr
+ """
+ p[0] = self.ast.Binary(p[2], p[1], p[3])
+
+ def p_binary_ltl_expr(self, p):
+ """expr : expr UNTIL expr
+ | expr WEAK_UNTIL expr
+ | expr RELEASE expr
+ | expr IMPLIES expr
+ | expr EQUIV expr
+ """
+ p[0] = self.ast.Binary(p[2], p[1], p[3])
+
+ def p_unary_ltl_expr(self, p):
+ """expr : NEXT expr
+ | ALWAYS expr
+ | EVENTUALLY expr
+ """
+ p[0] = self.ast.Unary(p[1], p[2])
+
+ # Constants
+ # =========
+
+ def p_const_expr_const(self, p):
+ """const_expr : const"""
+ p[0] = p[1]
+
+ def p_const_expr_unary(self, p):
+ """const_expr : MINUS const_expr %prec UMINUS"""
+ p[0] = self.ast.Unary(p[1], p[2])
+
+ def p_const_expr_binary(self, p):
+ """const_expr : const_expr PLUS const_expr
+ | const_expr MINUS const_expr
+ | const_expr TIMES const_expr
+ | const_expr DIVIDE const_expr
+ | const_expr MOD const_expr
+ """
+ p[0] = self.ast.Binary(p[2], p[1], p[3])
+
+ def p_const_expr_paren(self, p):
+ """const_expr : LPAREN const_expr RPAREN"""
+ p[0] = p[2]
+
+ def p_const(self, p):
+ """const : boolean
+ | number
+ """
+ # lex maps `skip` to `TRUE`
+ p[0] = p[1]
+
+ def p_bool(self, p):
+ """boolean : TRUE
+ | FALSE
+ """
+ p[0] = self.ast.Bool(p[1])
+
+ def p_number(self, p):
+ """number : INTEGER"""
+ p[0] = self.ast.Integer(p[1])
+
+ # Auxiliary
+ # =========
+
+ def p_two_args(self, p):
+ """two_args : expr COMMA expr"""
+
+ def p_args(self, p):
+ """args : arg"""
+ p[0] = p[1]
+
+ def p_prargs(self, p):
+ """prargs : COMMA arg"""
+ p[0] = p[2]
+
+ def p_prargs_empty(self, p):
+ """prargs : empty"""
+
+ def p_args_empty(self, p):
+ """args : empty"""
+
+ def p_margs(self, p):
+ """margs : arg
+ | expr LPAREN arg RPAREN
+ """
+
+ def p_arg(self, p):
+ """arg : expr
+ | expr COMMA arg
+ """
+ p[0] = 'arg'
+
+ # TODO: CONST, MINUS CONST %prec UMIN
+ def p_rarg(self, p):
+ """rarg : varref
+ | EVAL LPAREN expr RPAREN
+ """
+ p[0] = 'rarg'
+
+ def p_rargs(self, p):
+ """rargs : rarg
+ | rarg COMMA rargs
+ | rarg LPAREN rargs RPAREN
+ | LPAREN rargs RPAREN
+ """
+
+ def p_proctype(self, p):
+ """proctype : PROCTYPE
+ | D_PROCTYPE
+ """
+ if p[1] == 'proctype':
+ p[0] = dict(d_proc=False)
+ else:
+ p[0] = dict(d_proc=True)
+
+ # PNAME
+ def p_aname(self, p):
+ """aname : NAME"""
+ p[0] = p[1]
+
+ # optional name
+ def p_optname(self, p):
+ """optname : NAME"""
+ p[0] = p[1]
+
+ def p_optname_empty(self, p):
+ """optname : empty"""
+
+ # optional semi
+ def p_os(self, p):
+ """os : empty
+ | semi
+ """
+ p[0] = ';'
+
+ # multi-semi
+ def p_msemi(self, p):
+ """msemi : semi
+ | msemi semi
+ """
+ p[0] = ';'
+
+ def p_semi(self, p):
+ """semi : SEMI
+ | ARROW
+ """
+ p[0] = ';'
+
+ def p_asgn(self, p):
+ """asgn : EQUALS
+ | empty
+ """
+ p[0] = None
+
+ def p_visible(self, p):
+ """vis : HIDDEN
+ | SHOW
+ | ISLOCAL
+ """
+ p[0] = {'visible': p[1]}
+
+ def p_empty(self, p):
+ """empty : """
+
    def p_error(self, p):
        # PLY error callback: `p` is the offending token (or None at
        # end of input); no error recovery is attempted -- abort parse
        raise Exception('syntax error at: {p}'.format(p=p))
+
+
def cpp(s):
    """Call the C{C} preprocessor with input C{s}.

    Returns the preprocessed text (stdout of `cpp -E -x c`).

    @raise Exception: if the `cpp` executable is not found in PATH.
    """
    try:
        p = subprocess.Popen(['cpp', '-E', '-x', 'c'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             universal_newlines=True)
    except FileNotFoundError:
        # BUG FIX: the old check `e.errno == os.errno.ENOENT` raised
        # AttributeError on Python >= 3.6 (`os.errno` was removed),
        # masking the real error.  `FileNotFoundError` is exactly the
        # ENOENT case; any other OSError still propagates unchanged.
        raise Exception('C preprocessor (cpp) not found in path.')
    logger.debug('cpp input:\n' + s)
    stdout, stderr = p.communicate(s)
    logger.debug('cpp returned: {c}'.format(c=p.returncode))
    logger.debug('cpp stdout:\n {out}'.format(out=stdout))
    return stdout
+
+
def rebuild_table(parser, tabmodule):
    """Force a rebuild of the PLY parser table, logging debug output.

    @param parser: `Parser` instance whose `build` regenerates the table
    @param tabmodule: table module name (without `.py` suffix)
    """
    # log details to file
    h = logging.FileHandler('log.txt', mode='w')
    debuglog = logging.getLogger()
    debuglog.addHandler(h)
    debuglog.setLevel('DEBUG')
    outputdir = './'
    # rm table files to force rebuild to get debug output
    # (`os` is already imported at module level)
    for table_file in (tabmodule + '.py', tabmodule + '.pyc'):
        try:
            os.remove(table_file)
        except OSError:
            # narrowed from a bare `except:`, which would also have
            # swallowed KeyboardInterrupt/SystemExit; a missing file
            # is the only expected failure here
            print('no "{t}" found'.format(t=table_file))
    parser.build(tabmodule, outputdir=outputdir,
                 write_tables=True, debug=True,
                 debuglog=debuglog)
+
+
+if __name__ == '__main__':
+ rebuild_table(Parser(), TABMODULE.split('.')[-1])
+
+
+# TODO
+#
+# expr << expr
+# expr >> expr
+# (expr -> expr : expr)
+# run func(args) priority
+# len(varref)
+# enabled(expr)
+# get_p(expr)
+# var ? [rargs]
+# var ?? [rargs]
+# timeout
+# nonprogress
+# pc_val(expr)
+# name[expr] @ name
+# name[expr] : pfld
+# name @ name
+# name : pfld
diff --git a/formal/promela/src/src/modules/promela_yacc/setup.py b/formal/promela/src/src/modules/promela_yacc/setup.py
new file mode 100644
index 00000000..b58de8a0
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/setup.py
@@ -0,0 +1,65 @@
+from setuptools import setup
+# inline:
+# from promela import yacc
+
+
# one-line description shown on PyPI
description = (
    'Parser and abstract syntax tree for the Promela modeling language.')
README = 'README.md'
# `promela/_version.py` is (re)generated on each build; see `s` below
VERSION_FILE = 'promela/_version.py'
MAJOR = 0
MINOR = 0
MICRO = 3
version = '{major}.{minor}.{micro}'.format(
    major=MAJOR, minor=MINOR, micro=MICRO)
# contents written to VERSION_FILE
s = (
    '# This file was generated from setup.py\n'
    "version = '{version}'\n").format(version=version)
install_requires = [
    'networkx >= 2.0',
    'ply >= 3.4',
    'pydot >= 1.1.0']
classifiers = [
    'Development Status :: 2 - Pre-Alpha',
    'Intended Audience :: Science/Research',
    'License :: OSI Approved :: BSD License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Topic :: Scientific/Engineering']
keywords = [
    'promela', 'parser', 'syntax tree', 'ply', 'lex', 'yacc']
+
+
def build_parser_table():
    """Generate and cache the PLY parser table inside the `promela/` package."""
    # deferred import: `promela` (and its `ply` dependency) may be
    # unavailable when setup.py runs for metadata only (see __main__)
    from promela import yacc
    tabmodule = yacc.TABMODULE.split('.')[-1]
    outputdir = 'promela/'
    parser = yacc.Parser()
    parser.build(tabmodule, outputdir=outputdir, write_tables=True)
+
+
if __name__ == '__main__':
    # regenerate the version module on every invocation
    with open(VERSION_FILE, 'w') as f:
        f.write(s)
    # caching the parser table is best-effort: the package (or `ply`)
    # may not be importable yet, e.g. during an `egg_info` run
    try:
        build_parser_table()
    except ImportError:
        print('WARNING: `promela` could not cache parser tables '
              '(ignore this if running only for "egg_info").')
    setup(
        name='promela',
        version=version,
        description=description,
        long_description=open(README).read(),
        author='Ioannis Filippidis',
        author_email='jfilippidis@gmail.com',
        url='https://github.com/johnyf/promela',
        license='BSD',
        install_requires=install_requires,
        tests_require=['nose'],
        packages=['promela'],
        package_dir={'promela': 'promela'},
        classifiers=classifiers,
        keywords=keywords)
diff --git a/formal/promela/src/src/modules/promela_yacc/tests/yacc_test.py b/formal/promela/src/src/modules/promela_yacc/tests/yacc_test.py
new file mode 100644
index 00000000..b26d22aa
--- /dev/null
+++ b/formal/promela/src/src/modules/promela_yacc/tests/yacc_test.py
@@ -0,0 +1,759 @@
+import logging
+import networkx as nx
+import networkx.algorithms.isomorphism as iso
+from nose.tools import assert_raises
+from promela import ast, yacc
+
+
+logger = logging.getLogger(__name__)
+logger.setLevel('WARNING')
+log = logging.getLogger('promela.yacc')
+log.setLevel(logging.ERROR)
+h = logging.StreamHandler()
+log = logging.getLogger('promela.ast')
+log.setLevel('WARNING')
+log.addHandler(h)
+
+
+parser = yacc.Parser()
+
+
+def parse_proctype_test():
+ s = '''
+ active [3] proctype main(){
+ int x;
+ }
+ '''
+ tree = parser.parse(s)
+ assert isinstance(tree, ast.Program)
+ assert len(tree) == 1
+ p = tree[0]
+ assert isinstance(p, ast.Proctype)
+ assert p.name == 'main'
+ assert isinstance(p.body, ast.Sequence)
+ assert p.active == 3, type(p.active)
+ assert p.args is None
+ assert p.priority is None
+ assert p.provided is None
+
+
+def parse_if_test():
+ s = '''
+ proctype p (){
+ if
+ :: skip
+ fi
+ }
+ '''
+ tree = parser.parse(s)
+ assert isinstance(tree, ast.Program)
+ assert len(tree) == 1
+ proc = tree[0]
+ assert isinstance(proc, ast.Proctype)
+ assert isinstance(proc.body, ast.Sequence)
+ assert len(proc.body) == 1
+ if_block = proc.body[0]
+ assert isinstance(if_block, ast.Options)
+ assert if_block.type == 'if'
+ options = if_block.options
+ assert isinstance(options, list)
+ assert len(options) == 1
+ opt0 = options[0]
+ assert isinstance(opt0, ast.Sequence)
+ assert len(opt0) == 1
+ assert isinstance(opt0[0], ast.Expression)
+ e = opt0[0].expr
+ assert isinstance(e, ast.Bool)
+ assert e.value, e
+
+
+def parse_do_multiple_options_test():
+ s = '''
+ proctype p (){
+ do
+ :: x -> x = x + 1;
+ :: (y == 0) -> y = x; y == 1;
+ od
+ }
+ '''
+ tree = parser.parse(s)
+ assert isinstance(tree, ast.Program)
+ assert len(tree) == 1
+ proc = tree[0]
+ assert isinstance(proc, ast.Proctype)
+ assert isinstance(proc.body, ast.Sequence)
+ assert len(proc.body) == 1
+ do_block = proc.body[0]
+ assert isinstance(do_block, ast.Options)
+ assert do_block.type == 'do'
+ options = do_block.options
+ assert isinstance(options, list)
+ assert len(options) == 2
+ opt = options[0]
+ assert isinstance(opt, ast.Sequence)
+ assert len(opt) == 2
+ assert isinstance(opt[0], ast.Expression)
+ assert isinstance(opt[1], ast.Assignment)
+
+ opt = options[1]
+ assert isinstance(opt, ast.Sequence)
+ assert len(opt) == 3
+ assert isinstance(opt[0], ast.Expression)
+ assert isinstance(opt[1], ast.Assignment)
+ assert isinstance(opt[2], ast.Expression)
+
+
+def if_one_option_pg_test():
+ s = '''
+ proctype p (){
+ if
+ :: skip
+ fi
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1)])
+ assert nx.is_isomorphic(g, h)
+
+
+def if_two_options_pg_test():
+ s = '''
+ proctype p(){
+ if
+ :: true
+ :: false
+ fi
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (0, 1)])
+ assert nx.is_isomorphic(g, h)
+
+
+def do_one_option_pg_test():
+ s = '''
+ proctype p(){
+ do
+ :: skip
+ od
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 0)])
+ assert nx.is_isomorphic(g, h)
+
+
+def do_two_options_pg_test():
+ s = '''
+ proctype p(){
+ do
+ :: true
+ :: false
+ od
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 0), (0, 0)])
+ assert nx.is_isomorphic(g, h)
+
+
+def nested_if_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ if
+ :: if
+ :: true
+ :: false
+ fi
+ :: x
+ fi
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (0, 1), (0, 1)])
+ assert nx.is_isomorphic(g, h)
+
+
+def nested_if_not_guard_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ if
+ :: true;
+ if
+ :: true
+ :: false
+ fi
+ :: x
+ fi
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (0, 2), (2, 1), (2, 1)])
+ assert nx.is_isomorphic(g, h)
+
+
+def doubly_nested_if_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ if
+ :: if
+ :: true
+ :: if
+ :: true
+ :: skip
+ fi
+ :: false
+ fi
+ :: if
+ :: if
+ :: true
+ :: false
+ fi
+ fi
+ fi
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ for i in range(6):
+ h.add_edge(0, 1)
+ assert nx.is_isomorphic(g, h)
+
+
+def nested_do_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ if
+ :: do
+ :: true
+ :: false
+ od
+ :: x
+ fi
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (0, 2), (0, 2), (2, 2), (2, 2)])
+ assert nx.is_isomorphic(g, h)
+
+
+def nested_do_not_guard_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ if
+ :: true;
+ do
+ :: true
+ :: false
+ od
+ :: x
+ fi
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (0, 2), (2, 2), (2, 2)])
+ assert nx.is_isomorphic(g, h)
+
+
+def combined_if_do_program_graph_test():
+ s = '''
+ active proctype p(){
+ int x, y, z;
+ if /* 0 */
+ :: do
+ :: x = 1; /* 2 */
+ y == 5 /* 1 */
+
+ :: z = 3; /* 3 */
+ skip /* 1 */
+
+ :: if
+ :: z = (3 - x) * y; /* 4 */
+ true; /* 5 */
+ y = 3 /* 1 */
+
+ :: true /* 1 */
+ fi
+ od
+ /* 1 */
+
+ :: true; /* 6 */
+ if
+ :: true /* 7 */
+
+ :: true -> /* 8 */
+ x = y /* 7 */
+
+ fi
+ fi
+ /* 7 */
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([
+ (0, 2), (0, 3), (0, 4), (0, 1),
+ (2, 1), (3, 1), (5, 1),
+ (1, 2), (1, 3), (1, 4), (1, 1),
+ (4, 5),
+        # true; if ...
+ (0, 6),
+ (6, 7), (6, 8), (8, 7)])
+ dump(g, node_label=None)
+ assert iso.is_isomorphic(g, h)
+
+
+def invalid_label_pg_test():
+ s = '''
+ proctype p(){
+ do
+ :: S0: x = 1;
+ od
+ }
+ '''
+ tree = parser.parse(s)
+ with assert_raises(Exception):
+ tree[0].to_pg()
+
+
+def goto_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ x = 1;
+ goto S0;
+ x = 2;
+ S0: x = 3;
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (1, 2)])
+ assert nx.is_isomorphic(g, h)
+
+
+def double_goto_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ x = 1;
+ goto S0;
+ x = 2;
+ S0: goto S1;
+ x = 3;
+ S1: skip
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (1, 2)])
+ assert nx.is_isomorphic(g, h)
+
+
+def goto_inside_if_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ if
+ :: true; goto S0; x = 1;
+ :: x = 3; false
+ fi;
+ S0: skip
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (0, 2), (2, 1), (1, 3)])
+ assert nx.is_isomorphic(g, h)
+
+
+def goto_loop_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ S0: if
+ :: true; goto S0; x = 1;
+ :: x = 3;
+ fi;
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (0, 0)])
+ assert nx.is_isomorphic(g, h)
+
+
+def goto_self_loop_pg_test():
+ s = '''
+ proctype p(){
+ S0: goto S1;
+ S1: goto S0
+ }
+ '''
+ tree = parser.parse(s)
+ with assert_raises(AssertionError):
+ tree[0].to_pg()
+
+
+def break_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ do
+ :: true; x = 1;
+ :: break; x = 3;
+ od
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 1), (1, 0), (0, 2)])
+ assert nx.is_isomorphic(g, h)
+
+
+def nested_break_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ /* 0 */
+ do
+ :: true; /* 2 */
+ x == 1; /* 0 */
+
+ :: do
+ :: x == 2;
+ break /* 0 */
+
+ :: false; /* 4 */
+ x == 3 /* 5 */
+
+ od
+ /* 5 */
+
+ :: break; /* 1 */
+ x == 4;
+
+ od
+ /* 1 */
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([
+ (0, 1),
+ (0, 2), (2, 0),
+ (0, 0),
+ (0, 4), (4, 5),
+ (5, 4), (5, 0)])
+ assert nx.is_isomorphic(g, h)
+
+
+def atomic_pg_test():
+ s = '''
+ proctype p(){
+ bit x;
+ x = 1;
+ atomic { x = 2; }
+ x = 3;
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_node(0, context=None)
+ h.add_node(1, context=None)
+ h.add_node(2, context='atomic')
+ h.add_node(3, context=None)
+ h.add_edges_from([(0, 1), (1, 2), (2, 3)])
+ nm = lambda x, y: x['context'] == y['context']
+ gm = iso.GraphMatcher(g, h, node_match=nm)
+ assert gm.is_isomorphic()
+
+
+def do_atomic_dissapears_pg_test():
+ s = '''
+ proctype p(){
+ bit x, y;
+ /* 0 */
+ do
+ :: true; /* 3 */
+ atomic { x = 2; goto S0; /* 1 */ y = 1}
+ :: x == 1; /* 4 */ y == 2; /* 0 */
+ od;
+ x = 3;
+ /* 1 */
+ S0: skip
+ /* 2 */
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([
+ (0, 3), (3, 1), (1, 2),
+ (0, 4), (4, 0)])
+ for u in h:
+ h.add_node(u, context=None)
+ nm = lambda x, y: x['context'] == y['context']
+ gm = iso.GraphMatcher(g, h, node_match=nm)
+ assert gm.is_isomorphic()
+
+
+def do_atomic_pg_test():
+ s = '''
+ proctype p(){
+ bit x, y;
+ /* 0 */
+ do
+ :: true; /* 1 */
+ atomic { x = 2; /* 2 */
+ y = 1; goto S0; } /* 3 */
+ :: x == 1; /* 4 */ y == 2; /* 0 */
+ od;
+ x = 3;
+ /* 3 */
+ S0: skip
+ /* 5 */
+ }
+ '''
+ tree = parser.parse(s)
+ g = tree[0].to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([
+ (0, 1), (1, 2), (2, 3),
+ (3, 5), (0, 4), (4, 0)])
+ for u in h:
+ h.add_node(u, context=None)
+ h.add_node(2, context='atomic')
+ nm = lambda x, y: x['context'] == y['context']
+ gm = iso.GraphMatcher(g, h, node_match=nm)
+ assert gm.is_isomorphic()
+
+
+def ltl_block_test():
+ s = '''
+ bit x, y, c;
+
+ proctype p(){
+ if
+ :: x;
+ fi
+ }
+
+ ltl { (x == 1) && []<>(y != 2) && <>[](c == 1) && (x U y) }
+ '''
+ tree = parser.parse(s)
+ assert len(tree) == 3, repr(tree)
+ decl, p, ltl = tree
+ assert isinstance(p, ast.Proctype)
+ assert isinstance(ltl, ast.LTL)
+ s = str(ltl.formula)
+ assert s == (
+ '((((x == 1) && ([] (<> (y != 2)))) && '
+ '(<> ([] (c == 1)))) && (x U y))'), s
+
+
+def test_else():
+ s = '''
+ proctype p(){
+ byte x;
+ do
+ :: x == 0
+ :: x == 1
+ :: else
+ od
+ }
+ '''
+ (proc,) = parser.parse(s)
+ g = proc.to_pg()
+ dump(g)
+ for u, v, d in g.edges(data=True):
+ c = d['stmt']
+ if isinstance(c, ast.Else):
+ print(c.other_guards)
+
+
+def test_nested_else():
+ s = '''
+ proctype p(){
+ byte x;
+ do
+ ::
+ if
+ :: false
+ :: else
+ fi
+ od
+ }
+ '''
+ (proc,) = parser.parse(s)
+ g = proc.to_pg()
+ dump(g)
+ h = nx.MultiDiGraph()
+ h.add_edges_from([(0, 0), (0, 0)])
+ for u in h:
+ h.add_node(u, context=None)
+ nm = lambda x, y: x['context'] == y['context']
+ gm = iso.GraphMatcher(g, h, node_match=nm)
+ assert gm.is_isomorphic()
+
+
+def test_double_else():
+ s = '''
+ proctype foo(){
+ bit x;
+ do
+ ::
+ if
+ :: x
+ :: else
+ fi
+ :: else
+ od
+ }
+ '''
+ # syntactic else = Promela language definition
+ (proc,) = parser.parse(s)
+ with assert_raises(AssertionError):
+ proc.to_pg()
+ # different from Promela language definition
+ g = proc.to_pg(syntactic_else=True)
+ active_else = 0
+ off_else = 0
+ for u, v, d in g.edges(data=True):
+ stmt = d['stmt']
+ if isinstance(stmt, ast.Else):
+ other = stmt.other_guards
+ if other is None:
+ off_else += 1
+ else:
+ active_else += 1
+ assert len(other) == 1, other
+ (other_stmt,) = other
+ s = str(other_stmt)
+ assert s == 'x', s
+ assert active_else == 1, active_else
+ assert off_else == 1, off_else
+
+
+def test_pg_node_order():
+ s = '''
+ proctype foo(){
+ bit x;
+ if
+ ::
+ do
+ :: x > 2; x = 1
+ :: else; break
+ od;
+ x = 1
+ :: x = 2
+ fi
+ }
+ '''
+ (proc,) = parser.parse(s)
+ g = proc.to_pg()
+ dump(g)
+ # Final indexing depends on the
+ # aux goto nodes created and the contraction order.
+    # The latter depends on the intermediate indexing,
+ # which is fixed syntactically
+ # (see `generate_unique_node`).
+ edges = {(0, 1), (0, 3), (0, 4), (2, 3),
+ (2, 4), (3, 1), (4, 2)}
+ assert set(g) == set(range(5)), g.nodes()
+ assert set(g.edges()) == edges, g.edges()
+
+
+def test_labels():
+ s = '''
+ active proctype foo(){
+ progress:
+ do
+ :: true
+ od
+ }
+ '''
+ (proc,) = parser.parse(s)
+ g = proc.to_pg()
+ for u, d in g.nodes(data=True):
+ print(d.get('label'))
+
+
+def test_remote_ref():
+ s = '''
+ proctype foo(){
+ bar @ critical
+ }
+ '''
+ (proc,) = parser.parse(s)
+ g = proc.to_pg()
+ (e,) = g.edges(data=True)
+ u, v, d = e
+ s = d['stmt']
+ assert isinstance(s, ast.Expression), s
+ ref = s.expr
+ assert isinstance(ref, ast.RemoteRef), ref
+ assert ref.proctype == 'bar', ref.proctype
+ assert ref.label == 'critical', ref.label
+ assert ref.pid is None, ref.pid
+
+
+def dump(g, fname='g.pdf', node_label='context'):
+ if logger.getEffectiveLevel() >= logging.DEBUG:
+ return
+ # map nodes to integers
+ ast.dump_graph(
+ g, fname, node_label=node_label, edge_label='stmt')
+
+
+if __name__ == '__main__':
+ test_labels()