Commit

Black
elprans committed Aug 30, 2021
1 parent 850f149 commit 468ecd0
Showing 17 changed files with 632 additions and 396 deletions.
2 changes: 1 addition & 1 deletion .flake8
@@ -1,3 +1,3 @@
[flake8]
ignore = E121,E126,E127,E226,W504
ignore = E203,W503
exclude = .git,__pycache__,build,dist,.eggs,.tox
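
For reference, E203 (whitespace before ":") and W503 (line break before a binary operator) are the two pycodestyle checks that Black's output is known to trigger, which is why the ignore list is narrowed to exactly those codes. A minimal flake8 configuration for a Black-formatted project commonly looks like the sketch below; the max-line-length value assumes Black's default of 88 and is illustrative only, not part of this commit.

[flake8]
# E203 and W503 conflict with Black's formatting decisions.
ignore = E203,W503
# Assumes Black's default line length; adjust if the project overrides it.
max-line-length = 88
exclude = .git,__pycache__,build,dist,.eggs,.tox
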
173 changes: 113 additions & 60 deletions parsing/__init__.py
@@ -121,17 +121,38 @@
"""
__all__ = ["SpecError", "UnexpectedToken", "Nonterm",
"Precedence", "Spec", "Token", "Lr", "Glr",
"ModuleSpecSource"]

from parsing.errors import (ParsingError, SpecError, # noqa
UnexpectedToken, AnyException) # noqa
from parsing.grammar import (Precedence, Production, SymbolSpec, # noqa
NontermSpec, TokenSpec, EndOfInput, # noqa
Epsilon, epsilon, NontermStart, # noqa
ShiftAction, ReduceAction)
from parsing.ast import Symbol, Nonterm, Token # noqa
__all__ = [
"SpecError",
"UnexpectedToken",
"Nonterm",
"Precedence",
"Spec",
"Token",
"Lr",
"Glr",
"ModuleSpecSource",
]

from parsing.errors import ( # noqa: F401
ParsingError,
SpecError,
UnexpectedToken,
AnyException,
)
from parsing.grammar import ( # noqa: F401
Precedence,
Production,
SymbolSpec,
NontermSpec,
TokenSpec,
EndOfInput,
Epsilon,
epsilon,
NontermStart,
ShiftAction,
ReduceAction,
)
from parsing.ast import Symbol, Nonterm, Token # noqa: F401
from parsing.automaton import Spec
from parsing.module_spec import ModuleSpecSource

@@ -185,7 +206,8 @@ def __setStart(self, start):
A list of parsing results. For LR parsing, there is only ever one
result, but for compatibility with the Glr interface, start is a
list.
""")
""",
)

def __getVerbose(self):
return self._verbose
@@ -246,15 +268,20 @@ def _act(self, sym, symSpec):
self._printStack()

def _printStack(self):
print("STACK:", end=' ')
print("STACK:", end=" ")
for node in self._stack:
print("%r" % node[0], end=' ')
print("%r" % node[0], end=" ")
print()
print(" ", end=' ')
print(" ", end=" ")
for node in self._stack:
print("%r%s" % (
node[1], (" " * (len("%r" % node[0]) - len("%r" % node[1])))),
end=' ')
print(
"%r%s"
% (
node[1],
(" " * (len("%r" % node[0]) - len("%r" % node[1]))),
),
end=" ",
)
print()

def _reduce(self, production):
@@ -284,6 +311,7 @@ def _production(self, production, rhs):

return r


# ===========================================================================
# Begin graph-structured stack (GSS) classes.
#
@@ -310,8 +338,7 @@ def __repr__(self):
return "{%r}" % self.value

def __eq__(self, other):
if self.node != other.node \
or self.value != other.value:
if self.node != other.node or self.value != other.value:
return False
return True

@@ -378,17 +405,17 @@ def _pathsRecurse(self, pathLen, path):
path.pop(0)
path.pop(0)


#
# End graph-structured stack (GSS) classes.
# ========================================================================


class Glr(Lr):
"""
GLR parser. The Glr class uses a Spec instance in order to parse input
that is fed to it via the token() method, and terminated via the eoi()
method.
"""
GLR parser. The Glr class uses a Spec instance in order to parse input
that is fed to it via the token() method, and terminated via the eoi()
method."""

def __init__(self, spec):
Lr.__init__(self, spec)
@@ -405,8 +432,7 @@ def reset(self):

def token(self, token):
"""
Feed a token to the parser.
"""
Feed a token to the parser."""
if self._verbose:
print("%s" % ("-" * 80))
print("INPUT: %r" % token)
@@ -417,8 +443,7 @@ def token(self, token):

def eoi(self):
"""
Signal end-of-input to the parser.
"""
Signal end-of-input to the parser."""
token = EndOfInput(self)
self.token(token)

@@ -468,33 +493,41 @@ def _reductions(self, sym, symSpec):
if type(action) == ReduceAction:
if len(action.production.rhs) == 0:
if action.production not in epsilons:
assert len(
[path for path in top.paths(0)]) == 1
assert (
len([path for path in top.paths(0)]) == 1
)
path = [p for p in top.paths(0)][0]
epsilons[action.production] = [top]
workQ.append((path, action.production))
if self._verbose:
print(" --> enqueue(a) %r" %
action.production)
print(
" --> enqueue(a) %r"
% action.production
)
print(" %r" % path)
elif top not in epsilons[action.production]:
assert len(
[path for path in top.paths(0)]) == 1
assert (
len([path for path in top.paths(0)]) == 1
)
path = [p for p in top.paths(0)][0]
epsilons[action.production].append(top)
workQ.append((path, action.production))
if self._verbose:
print(" --> enqueue(b) %r" %
action.production)
print(
" --> enqueue(b) %r"
% action.production
)
print(" %r" % path)
else:
# Iterate over all reduction paths through stack
# and enqueue them.
for path in top.paths(len(action.production.rhs)):
workQ.append((path, action.production))
if self._verbose:
print(" --> enqueue(c) %r" %
action.production)
print(
" --> enqueue(c) %r"
% action.production
)
print(" %r" % path)
i += 1

@@ -526,8 +559,10 @@ def _reduce(self, workQ, epsilons, path, production, symSpec):
below = path[0]
done = False
for top in self._gss:
if top.nextState == \
self._spec._goto[below.nextState][production.lhs]:
if (
top.nextState
== self._spec._goto[below.nextState][production.lhs]
):
# top is compatible with the reduction result we want to add to
# the set of stack tops.
for edge in top.edges():
@@ -539,12 +574,18 @@ def _reduce(self, workQ, epsilons, path, production, symSpec):
value = production.lhs.nontermType.merge(edge.value, r)
if self._verbose:
if value == edge.value:
print(" %s" %
("-" * len("%r" % edge.value)))
print(
" %s"
% ("-" * len("%r" % edge.value))
)
else:
print(" %s %s" %
((" " * len("%r" % edge.value)),
"-" * len("%r" % r)))
print(
" %s %s"
% (
(" " * len("%r" % edge.value)),
"-" * len("%r" % r),
)
)
edge.value = value
done = True
break
@@ -557,17 +598,21 @@ def _reduce(self, workQ, epsilons, path, production, symSpec):
# Enqueue reduction paths that were created as a result of
# the new link.
self._enqueueLimitedReductions(
workQ, epsilons, edge, symSpec)
workQ, epsilons, edge, symSpec
)
done = True
break
if not done:
# There is no compatible stack top, so create a new one.
top = Gssn(
below, r, self._spec._goto[below.nextState][production.lhs])
below, r, self._spec._goto[below.nextState][production.lhs]
)
self._gss.append(top)
if self._verbose:
print(" --> shift(c) %r" %
self._spec._goto[below.nextState][production.lhs])
print(
" --> shift(c) %r"
% self._spec._goto[below.nextState][production.lhs]
)
self._enqueueLimitedReductions(workQ, epsilons, top.edge, symSpec)

# Enqueue paths that incorporate edge.
@@ -579,8 +624,10 @@ def _enqueueLimitedReductions(self, workQ, epsilons, edge, symSpec):
for action in self._spec._action[top.nextState][symSpec]:
if type(action) == ReduceAction:
if len(action.production.rhs) == 0:
if (gotos[top.nextState][action.production.lhs] ==
top.nextState):
if (
gotos[top.nextState][action.production.lhs]
== top.nextState
):
# Do nothing, since enqueueing a reduction
# would result in performing the same reduction
# twice.
@@ -590,16 +637,20 @@ def _enqueueLimitedReductions(self, workQ, epsilons, edge, symSpec):
epsilons[action.production] = [top]
workQ.append((path, action.production))
if self._verbose:
print(" --> enqueue(d) %r" %
action.production)
print(
" --> enqueue(d) %r"
% action.production
)
print(" %r" % path)
elif top not in epsilons[action.production]:
path = [top]
epsilons[action.production].append(top)
workQ.append((path, action.production))
if self._verbose:
print(" --> enqueue(e) %r" %
action.production)
print(
" --> enqueue(e) %r"
% action.production
)
print(" %r" % path)
else:
# Iterate over all reduction paths through stack
@@ -608,8 +659,10 @@ def _enqueueLimitedReductions(self, workQ, epsilons, edge, symSpec):
if edge in path[1::2]:
workQ.append((path, action.production))
if self._verbose:
print(" --> enqueue(f) %r" %
action.production)
print(
" --> enqueue(f) %r"
% action.production
)
print(" %r" % path)

def _shifts(self, sym, symSpec):
Expand Down Expand Up @@ -644,10 +697,10 @@ def _printStack(self):
for top in self._gss:
for path in top.paths():
if i == 0:
print("STK 0:", end=' ')
print("STK 0:", end=" ")
else:
print(" %d:" % i, end=' ')
print(" %d:" % i, end=" ")
for elm in path:
print("%r" % elm, end=' ')
print("%r" % elm, end=" ")
print()
i += 1
7 changes: 3 additions & 4 deletions parsing/ast.py
@@ -108,8 +108,8 @@ def merge(self, other):
in merge().
"""
raise SyntaxError(
"No merge() for %r; merging %r <--> %r" % (
type(self), self, other))
"No merge() for %r; merging %r <--> %r" % (type(self), self, other)
)


class Token(Symbol):
@@ -137,8 +137,7 @@ class rparen(Token):
"%token [none]" # [none] not necessary, since it's the default.
class id(Token):
"%token"
"""
"%token" """

def __init__(self, parser):
assert is_parser(parser)