Source code for schrodinger.application.desmond.enhanced_sampling.mexpParser

# $ANTLR 3.1.3 Mar 18, 2009 10:09:25 mexp.g 2010-08-30 14:33:35

import sys

from schrodinger.application.desmond.antlr3 import DFA
from schrodinger.application.desmond.antlr3 import BaseRecognizer
from schrodinger.application.desmond.antlr3 import MismatchedSetException
from schrodinger.application.desmond.antlr3 import NoViableAltException
from schrodinger.application.desmond.antlr3 import Parser
from schrodinger.application.desmond.antlr3 import ParserRuleReturnScope
from schrodinger.application.desmond.antlr3 import RecognitionException
from schrodinger.application.desmond.antlr3 import RecognizerSharedState
from schrodinger.application.desmond.antlr3 import version_str_to_tuple
from schrodinger.application.desmond.antlr3.compat import frozenset
from schrodinger.application.desmond.antlr3.tree import CommonTreeAdaptor
from schrodinger.application.desmond.antlr3.tree import \
    RewriteEarlyExitException
from schrodinger.application.desmond.antlr3.tree import RewriteRuleSubtreeStream
from schrodinger.application.desmond.antlr3.tree import RewriteRuleTokenStream

# for convenience in actions
HIDDEN = BaseRecognizer.HIDDEN

# token types
POPEN = 25
SUBTROP = 30
COPEN = 37
ADDOP = 31
EOF = -1
INTERVAL = 18
ELEM = 5
IF = 9
T__55 = 55
T__56 = 56
T__57 = 57
T__58 = 58
PROG = 13
NAME = 16
PCLOSE = 27
T__51 = 51
T__52 = 52
T__53 = 53
T__54 = 54
EXP = 34
MULTOP = 32
COMMA = 23
BIND = 7
IDENT = 24
VAR = 10
DIGIT = 40
HEADER = 12
EQ = 28
COMMENT = 42
T__50 = 50
DECL_OUTPUT = 14
BCLOSE = 36
T__46 = 46
T__47 = 47
T__45 = 45
SERIES = 8
T__48 = 48
STATIC = 11
T__49 = 49
ITER = 6
LIT = 26
INITKER = 21
CCLOSE = 38
DECL_META = 15
SEMI = 22
ALPHA = 41
COLON = 39
WS = 44
NEWLINE = 43
DIVOP = 33
BLOCK = 4
DIM = 20
CUTOFF = 19
BOPEN = 35
FIRST = 17
STRING = 29

# token names
tokenNames = [
    "<invalid>", "<EOR>", "<DOWN>", "<UP>", "BLOCK", "ELEM", "ITER", "BIND",
    "SERIES", "IF", "VAR", "STATIC", "HEADER", "PROG", "DECL_OUTPUT",
    "DECL_META", "NAME", "FIRST", "INTERVAL", "CUTOFF", "DIM", "INITKER",
    "SEMI", "COMMA", "IDENT", "POPEN", "LIT", "PCLOSE", "EQ", "STRING",
    "SUBTROP", "ADDOP", "MULTOP", "DIVOP", "EXP", "BOPEN", "BCLOSE", "COPEN",
    "CCLOSE", "COLON", "DIGIT", "ALPHA", "COMMENT", "NEWLINE", "WS", "'static'",
    "'declare_output'", "'declare_meta'", "'name'", "'first'", "'inf'",
    "'interval'", "'cutoff'", "'dimension'", "'initial'", "'series'", "'if'",
    "'then'", "'else'"
]
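

# Usage sketch (illustrative only; not part of the ANTLR-generated output).
# It assumes that the vendored antlr3 runtime exposes the standard stream
# classes (ANTLRStringStream, CommonTokenStream) and that a companion
# generated lexer module, mexpLexer, lives alongside this parser; adjust the
# names if the actual layout differs.
def _example_parse_mexp(text):
    from schrodinger.application.desmond.antlr3 import ANTLRStringStream
    from schrodinger.application.desmond.antlr3 import CommonTokenStream
    from schrodinger.application.desmond.enhanced_sampling.mexpLexer import \
        mexpLexer  # hypothetical companion lexer module

    lexer = mexpLexer(ANTLRStringStream(text))     # tokenize the m-expression
    parser = mexpParser(CommonTokenStream(lexer))  # drive this parser
    ret = parser.prog()  # start rule; reportError() aborts on parse failure
    return ret.tree      # rewritten AST rooted at ^( PROG header block )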


class mexpParser(Parser):
    grammarFileName = "mexp.g"
    antlr_version = version_str_to_tuple("3.1.3 Mar 18, 2009 10:09:25")
    antlr_version_str = "3.1.3 Mar 18, 2009 10:09:25"
    tokenNames = tokenNames
    def __init__(self, input, state=None, *args, **kwargs):
        if state is None:
            state = RecognizerSharedState()

        super(mexpParser, self).__init__(input, state, *args, **kwargs)

        self.dfa1 = self.DFA1(
            self, 1, eot=self.DFA1_eot, eof=self.DFA1_eof,
            min=self.DFA1_min, max=self.DFA1_max, accept=self.DFA1_accept,
            special=self.DFA1_special, transition=self.DFA1_transition)
        self.dfa15 = self.DFA15(
            self, 15, eot=self.DFA15_eot, eof=self.DFA15_eof,
            min=self.DFA15_min, max=self.DFA15_max, accept=self.DFA15_accept,
            special=self.DFA15_special, transition=self.DFA15_transition)
        self.dfa17 = self.DFA17(
            self, 17, eot=self.DFA17_eot, eof=self.DFA17_eof,
            min=self.DFA17_min, max=self.DFA17_max, accept=self.DFA17_accept,
            special=self.DFA17_special, transition=self.DFA17_transition)
        self.dfa18 = self.DFA18(
            self, 18, eot=self.DFA18_eot, eof=self.DFA18_eof,
            min=self.DFA18_min, max=self.DFA18_max, accept=self.DFA18_accept,
            special=self.DFA18_special, transition=self.DFA18_transition)
        self.dfa19 = self.DFA19(
            self, 19, eot=self.DFA19_eot, eof=self.DFA19_eof,
            min=self.DFA19_min, max=self.DFA19_max, accept=self.DFA19_accept,
            special=self.DFA19_special, transition=self.DFA19_transition)
        self.dfa20 = self.DFA20(
            self, 20, eot=self.DFA20_eot, eof=self.DFA20_eof,
            min=self.DFA20_min, max=self.DFA20_max, accept=self.DFA20_accept,
            special=self.DFA20_special, transition=self.DFA20_transition)
        self.dfa21 = self.DFA21(
            self, 21, eot=self.DFA21_eot, eof=self.DFA21_eof,
            min=self.DFA21_min, max=self.DFA21_max, accept=self.DFA21_accept,
            special=self.DFA21_special, transition=self.DFA21_transition)
        self.dfa22 = self.DFA22(
            self, 22, eot=self.DFA22_eot, eof=self.DFA22_eof,
            min=self.DFA22_min, max=self.DFA22_max, accept=self.DFA22_accept,
            special=self.DFA22_special, transition=self.DFA22_transition)
        self.dfa24 = self.DFA24(
            self, 24, eot=self.DFA24_eot, eof=self.DFA24_eof,
            min=self.DFA24_min, max=self.DFA24_max, accept=self.DFA24_accept,
            special=self.DFA24_special, transition=self.DFA24_transition)
        self.dfa25 = self.DFA25(
            self, 25, eot=self.DFA25_eot, eof=self.DFA25_eof,
            min=self.DFA25_min, max=self.DFA25_max, accept=self.DFA25_accept,
            special=self.DFA25_special, transition=self.DFA25_transition)

        self._adaptor = None
        self.adaptor = CommonTreeAdaptor()
    def getTreeAdaptor(self):
        return self._adaptor

    def setTreeAdaptor(self, adaptor):
        self._adaptor = adaptor

    adaptor = property(getTreeAdaptor, setTreeAdaptor)
    def reportError(self, err):
        BaseRecognizer.reportError(self, err)
        sys.stderr.write("unable to parse m-expression\n")
        # Abort instead of attempting error recovery; use the module-level
        # sys (imported above) and sys.exit rather than the interactive-only
        # builtin exit().
        sys.exit(1)

    class prog_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.prog_return, self).__init__()
            self.tree = None

    # $ANTLR start "prog"
    # mexp.g:52:1: prog : header block EOF -> ^( PROG header block ) ;
[docs] def prog(self,): retval = self.prog_return() retval.start = self.input.LT(1) root_0 = None EOF3 = None header1 = None block2 = None EOF3_tree = None stream_EOF = RewriteRuleTokenStream(self._adaptor, "token EOF") stream_block = RewriteRuleSubtreeStream(self._adaptor, "rule block") stream_header = RewriteRuleSubtreeStream(self._adaptor, "rule header") try: try: # mexp.g:52:6: ( header block EOF -> ^( PROG header block ) ) # mexp.g:52:9: header block EOF self._state.following.append(self.FOLLOW_header_in_prog158) header1 = self.header() self._state.following.pop() stream_header.add(header1.tree) self._state.following.append(self.FOLLOW_block_in_prog160) block2 = self.block() self._state.following.pop() stream_block.add(block2.tree) EOF3 = self.match(self.input, EOF, self.FOLLOW_EOF_in_prog162) stream_EOF.add(EOF3) # AST Rewrite # elements: header, block # token labels: # rule labels: retval # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 52:26: -> ^( PROG header block ) # mexp.g:52:29: ^( PROG header block ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(PROG, "PROG"), root_1) self._adaptor.addChild(root_1, stream_header.nextTree()) self._adaptor.addChild(root_1, stream_block.nextTree()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "prog"

    class header_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.header_return, self).__init__()
            self.tree = None

    # $ANTLR start "header"
    # mexp.g:55:1: header : (d+= decl SEMI )* -> ^( HEADER ( $d)* ) ;
[docs] def header(self,): retval = self.header_return() retval.start = self.input.LT(1) root_0 = None SEMI4 = None list_d = None d = None d = None SEMI4_tree = None stream_SEMI = RewriteRuleTokenStream(self._adaptor, "token SEMI") stream_decl = RewriteRuleSubtreeStream(self._adaptor, "rule decl") try: try: # mexp.g:55:9: ( (d+= decl SEMI )* -> ^( HEADER ( $d)* ) ) # mexp.g:55:17: (d+= decl SEMI )* pass # mexp.g:55:17: (d+= decl SEMI )* while True: #loop1 alt1 = 2 alt1 = self.dfa1.predict(self.input) if alt1 == 1: # mexp.g:55:18: d+= decl SEMI self._state.following.append( self.FOLLOW_decl_in_header191) d = self.decl() self._state.following.pop() stream_decl.add(d.tree) if list_d is None: list_d = [] list_d.append(d.tree) SEMI4 = self.match(self.input, SEMI, self.FOLLOW_SEMI_in_header193) stream_SEMI.add(SEMI4) else: break #loop1 # AST Rewrite # elements: d # token labels: # rule labels: retval # token list labels: # rule list labels: d # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) stream_d = RewriteRuleSubtreeStream(self._adaptor, "token d", list_d) root_0 = self._adaptor.nil() # 55:34: -> ^( HEADER ( $d)* ) # mexp.g:55:37: ^( HEADER ( $d)* ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(HEADER, "HEADER"), root_1) # mexp.g:55:46: ( $d)* while stream_d.hasNext(): self._adaptor.addChild(root_1, stream_d.nextTree()) stream_d.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "header"

    class decl_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.decl_return, self).__init__()
            self.tree = None

    # $ANTLR start "decl"
    # mexp.g:56:1: decl : ( decl_meta | decl_output | static );
[docs] def decl(self,): retval = self.decl_return() retval.start = self.input.LT(1) root_0 = None decl_meta5 = None decl_output6 = None static7 = None try: try: # mexp.g:56:9: ( decl_meta | decl_output | static ) alt2 = 3 LA2 = self.input.LA(1) if LA2 == 47: alt2 = 1 elif LA2 == 46: alt2 = 2 elif LA2 == 45: alt2 = 3 else: nvae = NoViableAltException("", 2, 0, self.input) raise nvae if alt2 == 1: # mexp.g:56:17: decl_meta root_0 = self._adaptor.nil() self._state.following.append( self.FOLLOW_decl_meta_in_decl222) decl_meta5 = self.decl_meta() self._state.following.pop() self._adaptor.addChild(root_0, decl_meta5.tree) elif alt2 == 2: # mexp.g:56:29: decl_output root_0 = self._adaptor.nil() self._state.following.append( self.FOLLOW_decl_output_in_decl226) decl_output6 = self.decl_output() self._state.following.pop() self._adaptor.addChild(root_0, decl_output6.tree) elif alt2 == 3: # mexp.g:56:43: static root_0 = self._adaptor.nil() self._state.following.append(self.FOLLOW_static_in_decl230) static7 = self.static() self._state.following.pop() self._adaptor.addChild(root_0, static7.tree) retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "decl"

    class static_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.static_return, self).__init__()
            self.tree = None

    # $ANTLR start "static"
    # mexp.g:57:1: static : 'static' a+= varWithType ( COMMA a+= varWithType )* -> ( $a)+ ;
[docs] def static(self,): retval = self.static_return() retval.start = self.input.LT(1) root_0 = None string_literal8 = None COMMA9 = None list_a = None a = None a = None string_literal8_tree = None COMMA9_tree = None stream_45 = RewriteRuleTokenStream(self._adaptor, "token 45") stream_COMMA = RewriteRuleTokenStream(self._adaptor, "token COMMA") stream_varWithType = RewriteRuleSubtreeStream(self._adaptor, "rule varWithType") try: try: # mexp.g:57:9: ( 'static' a+= varWithType ( COMMA a+= varWithType )* -> ( $a)+ ) # mexp.g:57:17: 'static' a+= varWithType ( COMMA a+= varWithType )* string_literal8 = self.match(self.input, 45, self.FOLLOW_45_in_static244) stream_45.add(string_literal8) self._state.following.append( self.FOLLOW_varWithType_in_static248) a = self.varWithType() self._state.following.pop() stream_varWithType.add(a.tree) if list_a is None: list_a = [] list_a.append(a.tree) # mexp.g:57:41: ( COMMA a+= varWithType )* while True: #loop3 alt3 = 2 LA3_0 = self.input.LA(1) if (LA3_0 == COMMA): alt3 = 1 if alt3 == 1: # mexp.g:57:42: COMMA a+= varWithType COMMA9 = self.match(self.input, COMMA, self.FOLLOW_COMMA_in_static251) stream_COMMA.add(COMMA9) self._state.following.append( self.FOLLOW_varWithType_in_static255) a = self.varWithType() self._state.following.pop() stream_varWithType.add(a.tree) if list_a is None: list_a = [] list_a.append(a.tree) else: break #loop3 # AST Rewrite # elements: a # token labels: # rule labels: retval # token list labels: # rule list labels: a # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) stream_a = RewriteRuleSubtreeStream(self._adaptor, "token a", list_a) root_0 = self._adaptor.nil() # 57:65: -> ( $a)+ # mexp.g:57:68: ( $a)+ if not (stream_a.hasNext()): raise RewriteEarlyExitException() while stream_a.hasNext(): self._adaptor.addChild(root_0, stream_a.nextTree()) stream_a.reset() retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "static"

    class varWithType_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.varWithType_return, self).__init__()
            self.tree = None

    # $ANTLR start "varWithType"
    # mexp.g:58:1: varWithType : IDENT POPEN LIT PCLOSE -> ^( STATIC IDENT LIT ) ;
[docs] def varWithType(self,): retval = self.varWithType_return() retval.start = self.input.LT(1) root_0 = None IDENT10 = None POPEN11 = None LIT12 = None PCLOSE13 = None IDENT10_tree = None POPEN11_tree = None LIT12_tree = None PCLOSE13_tree = None stream_IDENT = RewriteRuleTokenStream(self._adaptor, "token IDENT") stream_PCLOSE = RewriteRuleTokenStream(self._adaptor, "token PCLOSE") stream_POPEN = RewriteRuleTokenStream(self._adaptor, "token POPEN") stream_LIT = RewriteRuleTokenStream(self._adaptor, "token LIT") try: try: # mexp.g:58:13: ( IDENT POPEN LIT PCLOSE -> ^( STATIC IDENT LIT ) ) # mexp.g:58:17: IDENT POPEN LIT PCLOSE IDENT10 = self.match(self.input, IDENT, self.FOLLOW_IDENT_in_varWithType272) stream_IDENT.add(IDENT10) POPEN11 = self.match(self.input, POPEN, self.FOLLOW_POPEN_in_varWithType274) stream_POPEN.add(POPEN11) LIT12 = self.match(self.input, LIT, self.FOLLOW_LIT_in_varWithType276) stream_LIT.add(LIT12) PCLOSE13 = self.match(self.input, PCLOSE, self.FOLLOW_PCLOSE_in_varWithType278) stream_PCLOSE.add(PCLOSE13) # AST Rewrite # elements: LIT, IDENT # token labels: # rule labels: retval # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 58:40: -> ^( STATIC IDENT LIT ) # mexp.g:58:43: ^( STATIC IDENT LIT ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(STATIC, "STATIC"), root_1) self._adaptor.addChild(root_1, stream_IDENT.nextNode()) self._adaptor.addChild(root_1, stream_LIT.nextNode()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "varWithType"

    class decl_output_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.decl_output_return, self).__init__()
            self.tree = None

    # $ANTLR start "decl_output"
    # mexp.g:60:1: decl_output : 'declare_output' POPEN t+= output_term
    #     ( COMMA t+= output_term )* PCLOSE -> ^( DECL_OUTPUT ( $t)+ ) ;
[docs] def decl_output(self,): retval = self.decl_output_return() retval.start = self.input.LT(1) root_0 = None string_literal14 = None POPEN15 = None COMMA16 = None PCLOSE17 = None list_t = None t = None t = None string_literal14_tree = None POPEN15_tree = None COMMA16_tree = None PCLOSE17_tree = None stream_PCLOSE = RewriteRuleTokenStream(self._adaptor, "token PCLOSE") stream_46 = RewriteRuleTokenStream(self._adaptor, "token 46") stream_POPEN = RewriteRuleTokenStream(self._adaptor, "token POPEN") stream_COMMA = RewriteRuleTokenStream(self._adaptor, "token COMMA") stream_output_term = RewriteRuleSubtreeStream(self._adaptor, "rule output_term") try: try: # mexp.g:60:13: ( 'declare_output' POPEN t+= output_term ( COMMA t+= output_term )* PCLOSE -> ^( DECL_OUTPUT ( $t)+ ) ) # mexp.g:60:15: 'declare_output' POPEN t+= output_term ( COMMA t+= output_term )* PCLOSE string_literal14 = self.match(self.input, 46, self.FOLLOW_46_in_decl_output296) stream_46.add(string_literal14) POPEN15 = self.match(self.input, POPEN, self.FOLLOW_POPEN_in_decl_output298) stream_POPEN.add(POPEN15) self._state.following.append( self.FOLLOW_output_term_in_decl_output302) t = self.output_term() self._state.following.pop() stream_output_term.add(t.tree) if list_t is None: list_t = [] list_t.append(t.tree) # mexp.g:60:53: ( COMMA t+= output_term )* while True: #loop4 alt4 = 2 LA4_0 = self.input.LA(1) if (LA4_0 == COMMA): alt4 = 1 if alt4 == 1: # mexp.g:60:54: COMMA t+= output_term COMMA16 = self.match( self.input, COMMA, self.FOLLOW_COMMA_in_decl_output305) stream_COMMA.add(COMMA16) self._state.following.append( self.FOLLOW_output_term_in_decl_output309) t = self.output_term() self._state.following.pop() stream_output_term.add(t.tree) if list_t is None: list_t = [] list_t.append(t.tree) else: break #loop4 PCLOSE17 = self.match(self.input, PCLOSE, self.FOLLOW_PCLOSE_in_decl_output313) stream_PCLOSE.add(PCLOSE17) # AST Rewrite # elements: t # token labels: # rule labels: retval # token list labels: # rule list labels: t # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) stream_t = RewriteRuleSubtreeStream(self._adaptor, "token t", list_t) root_0 = self._adaptor.nil() # 61:19: -> ^( DECL_OUTPUT ( $t)+ ) # mexp.g:61:22: ^( DECL_OUTPUT ( $t)+ ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(DECL_OUTPUT, "DECL_OUTPUT"), root_1) # mexp.g:61:36: ( $t)+ if not (stream_t.hasNext()): raise RewriteEarlyExitException() while stream_t.hasNext(): self._adaptor.addChild(root_1, stream_t.nextTree()) stream_t.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "decl_output"

    class decl_meta_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.decl_meta_return, self).__init__()
            self.tree = None

    # $ANTLR start "decl_meta"
    # mexp.g:63:1: decl_meta : 'declare_meta' POPEN t+= meta_term
    #     ( COMMA t+= meta_term )* PCLOSE -> ^( DECL_META ( $t)+ ) ;
[docs] def decl_meta(self,): retval = self.decl_meta_return() retval.start = self.input.LT(1) root_0 = None string_literal18 = None POPEN19 = None COMMA20 = None PCLOSE21 = None list_t = None t = None t = None string_literal18_tree = None POPEN19_tree = None COMMA20_tree = None PCLOSE21_tree = None stream_PCLOSE = RewriteRuleTokenStream(self._adaptor, "token PCLOSE") stream_47 = RewriteRuleTokenStream(self._adaptor, "token 47") stream_POPEN = RewriteRuleTokenStream(self._adaptor, "token POPEN") stream_COMMA = RewriteRuleTokenStream(self._adaptor, "token COMMA") stream_meta_term = RewriteRuleSubtreeStream(self._adaptor, "rule meta_term") try: try: # mexp.g:63:11: ( 'declare_meta' POPEN t+= meta_term ( COMMA t+= meta_term )* PCLOSE -> ^( DECL_META ( $t)+ ) ) # mexp.g:63:13: 'declare_meta' POPEN t+= meta_term ( COMMA t+= meta_term )* PCLOSE string_literal18 = self.match(self.input, 47, self.FOLLOW_47_in_decl_meta349) stream_47.add(string_literal18) POPEN19 = self.match(self.input, POPEN, self.FOLLOW_POPEN_in_decl_meta351) stream_POPEN.add(POPEN19) self._state.following.append( self.FOLLOW_meta_term_in_decl_meta355) t = self.meta_term() self._state.following.pop() stream_meta_term.add(t.tree) if list_t is None: list_t = [] list_t.append(t.tree) # mexp.g:63:47: ( COMMA t+= meta_term )* while True: #loop5 alt5 = 2 LA5_0 = self.input.LA(1) if (LA5_0 == COMMA): alt5 = 1 if alt5 == 1: # mexp.g:63:48: COMMA t+= meta_term COMMA20 = self.match(self.input, COMMA, self.FOLLOW_COMMA_in_decl_meta358) stream_COMMA.add(COMMA20) self._state.following.append( self.FOLLOW_meta_term_in_decl_meta362) t = self.meta_term() self._state.following.pop() stream_meta_term.add(t.tree) if list_t is None: list_t = [] list_t.append(t.tree) else: break #loop5 PCLOSE21 = self.match(self.input, PCLOSE, self.FOLLOW_PCLOSE_in_decl_meta366) stream_PCLOSE.add(PCLOSE21) # AST Rewrite # elements: t # token labels: # rule labels: retval # token list labels: # rule list labels: t # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) stream_t = RewriteRuleSubtreeStream(self._adaptor, "token t", list_t) root_0 = self._adaptor.nil() # 64:19: -> ^( DECL_META ( $t)+ ) # mexp.g:64:22: ^( DECL_META ( $t)+ ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(DECL_META, "DECL_META"), root_1) # mexp.g:64:34: ( $t)+ if not (stream_t.hasNext()): raise RewriteEarlyExitException() while stream_t.hasNext(): self._adaptor.addChild(root_1, stream_t.nextTree()) stream_t.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "decl_meta"

    class output_term_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.output_term_return, self).__init__()
            self.tree = None

    # $ANTLR start "output_term"
    # mexp.g:66:1: output_term : ( 'name' EQ STRING -> ^( NAME STRING )
    #     | 'first' EQ (op= SUBTROP )? (v+= 'inf' | v+= LIT ) -> ^( FIRST ( $op)? ( $v)+ )
    #     | 'interval' EQ (v+= 'inf' | v+= LIT ) -> ^( INTERVAL ( $v)+ ) );
[docs] def output_term(self,): retval = self.output_term_return() retval.start = self.input.LT(1) root_0 = None op = None string_literal22 = None EQ23 = None STRING24 = None string_literal25 = None EQ26 = None string_literal27 = None EQ28 = None v = None list_v = None op_tree = None string_literal22_tree = None EQ23_tree = None STRING24_tree = None string_literal25_tree = None EQ26_tree = None string_literal27_tree = None EQ28_tree = None v_tree = None stream_49 = RewriteRuleTokenStream(self._adaptor, "token 49") stream_48 = RewriteRuleTokenStream(self._adaptor, "token 48") stream_EQ = RewriteRuleTokenStream(self._adaptor, "token EQ") stream_51 = RewriteRuleTokenStream(self._adaptor, "token 51") stream_SUBTROP = RewriteRuleTokenStream(self._adaptor, "token SUBTROP") stream_STRING = RewriteRuleTokenStream(self._adaptor, "token STRING") stream_LIT = RewriteRuleTokenStream(self._adaptor, "token LIT") stream_50 = RewriteRuleTokenStream(self._adaptor, "token 50") try: try: # mexp.g:66:13: ( 'name' EQ STRING -> ^( NAME STRING ) | 'first' EQ (op= SUBTROP )? (v+= 'inf' | v+= LIT ) -> ^( FIRST ( $op)? ( $v)+ ) | 'interval' EQ (v+= 'inf' | v+= LIT ) -> ^( INTERVAL ( $v)+ ) ) alt9 = 3 LA9 = self.input.LA(1) if LA9 == 48: alt9 = 1 elif LA9 == 49: alt9 = 2 elif LA9 == 51: alt9 = 3 else: nvae = NoViableAltException("", 9, 0, self.input) raise nvae if alt9 == 1: # mexp.g:66:17: 'name' EQ STRING string_literal22 = self.match( self.input, 48, self.FOLLOW_48_in_output_term404) stream_48.add(string_literal22) EQ23 = self.match(self.input, EQ, self.FOLLOW_EQ_in_output_term412) stream_EQ.add(EQ23) STRING24 = self.match(self.input, STRING, self.FOLLOW_STRING_in_output_term426) stream_STRING.add(STRING24) # AST Rewrite # elements: STRING # token labels: # rule labels: retval # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 66:66: -> ^( NAME STRING ) # mexp.g:66:69: ^( NAME STRING ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(NAME, "NAME"), root_1) self._adaptor.addChild(root_1, stream_STRING.nextNode()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 elif alt9 == 2: # mexp.g:67:17: 'first' EQ (op= SUBTROP )? (v+= 'inf' | v+= LIT ) string_literal25 = self.match( self.input, 49, self.FOLLOW_49_in_output_term476) stream_49.add(string_literal25) EQ26 = self.match(self.input, EQ, self.FOLLOW_EQ_in_output_term483) stream_EQ.add(EQ26) # mexp.g:67:35: (op= SUBTROP )? 
alt6 = 2 LA6_0 = self.input.LA(1) if (LA6_0 == SUBTROP): alt6 = 1 if alt6 == 1: # mexp.g:67:35: op= SUBTROP op = self.match(self.input, SUBTROP, self.FOLLOW_SUBTROP_in_output_term487) stream_SUBTROP.add(op) # mexp.g:67:45: (v+= 'inf' | v+= LIT ) alt7 = 2 LA7_0 = self.input.LA(1) if (LA7_0 == 50): alt7 = 1 elif (LA7_0 == LIT): alt7 = 2 else: nvae = NoViableAltException("", 7, 0, self.input) raise nvae if alt7 == 1: # mexp.g:67:46: v+= 'inf' v = self.match(self.input, 50, self.FOLLOW_50_in_output_term493) stream_50.add(v) if list_v is None: list_v = [] list_v.append(v) elif alt7 == 2: # mexp.g:67:57: v+= LIT v = self.match(self.input, LIT, self.FOLLOW_LIT_in_output_term499) stream_LIT.add(v) if list_v is None: list_v = [] list_v.append(v) # AST Rewrite # elements: op, v # token labels: op # rule labels: retval # token list labels: v # rule list labels: # wildcard labels: retval.tree = root_0 stream_op = RewriteRuleTokenStream(self._adaptor, "token op", op) stream_v = RewriteRuleTokenStream(self._adaptor, "token v", list_v) if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 67:66: -> ^( FIRST ( $op)? ( $v)+ ) # mexp.g:67:69: ^( FIRST ( $op)? ( $v)+ ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(FIRST, "FIRST"), root_1) # mexp.g:67:80: ( $op)? if stream_op.hasNext(): self._adaptor.addChild(root_1, stream_op.nextNode()) stream_op.reset() # mexp.g:67:85: ( $v)+ if not (stream_v.hasNext()): raise RewriteEarlyExitException() while stream_v.hasNext(): self._adaptor.addChild(root_1, stream_v.nextNode()) stream_v.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 elif alt9 == 3: # mexp.g:68:17: 'interval' EQ (v+= 'inf' | v+= LIT ) string_literal27 = self.match( self.input, 51, self.FOLLOW_51_in_output_term536) stream_51.add(string_literal27) EQ28 = self.match(self.input, EQ, self.FOLLOW_EQ_in_output_term540) stream_EQ.add(EQ28) # mexp.g:68:45: (v+= 'inf' | v+= LIT ) alt8 = 2 LA8_0 = self.input.LA(1) if (LA8_0 == 50): alt8 = 1 elif (LA8_0 == LIT): alt8 = 2 else: nvae = NoViableAltException("", 8, 0, self.input) raise nvae if alt8 == 1: # mexp.g:68:46: v+= 'inf' v = self.match(self.input, 50, self.FOLLOW_50_in_output_term557) stream_50.add(v) if list_v is None: list_v = [] list_v.append(v) elif alt8 == 2: # mexp.g:68:57: v+= LIT v = self.match(self.input, LIT, self.FOLLOW_LIT_in_output_term563) stream_LIT.add(v) if list_v is None: list_v = [] list_v.append(v) # AST Rewrite # elements: v # token labels: # rule labels: retval # token list labels: v # rule list labels: # wildcard labels: retval.tree = root_0 stream_v = RewriteRuleTokenStream(self._adaptor, "token v", list_v) if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 68:66: -> ^( INTERVAL ( $v)+ ) # mexp.g:68:69: ^( INTERVAL ( $v)+ ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(INTERVAL, "INTERVAL"), root_1) # mexp.g:68:85: ( $v)+ if not (stream_v.hasNext()): raise RewriteEarlyExitException() while stream_v.hasNext(): self._adaptor.addChild(root_1, stream_v.nextNode()) stream_v.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = 
self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "output_term"

    class meta_term_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.meta_term_return, self).__init__()
            self.tree = None

    # $ANTLR start "meta_term"
    # mexp.g:70:1: meta_term : ( 'name' EQ STRING -> ^( NAME STRING )
    #     | 'first' EQ (op= SUBTROP )? (v+= 'inf' | v+= LIT ) -> ^( FIRST ( $op)? ( $v)+ )
    #     | 'interval' EQ (v+= 'inf' | v+= LIT ) -> ^( INTERVAL ( $v)+ )
    #     | 'cutoff' EQ (v+= 'inf' | v+= LIT ) -> ^( CUTOFF ( $v)+ )
    #     | 'dimension' EQ LIT -> ^( DIM LIT )
    #     | 'initial' EQ STRING -> ^( INITKER STRING ) );
[docs] def meta_term(self,): retval = self.meta_term_return() retval.start = self.input.LT(1) root_0 = None op = None string_literal29 = None EQ30 = None STRING31 = None string_literal32 = None EQ33 = None string_literal34 = None EQ35 = None string_literal36 = None EQ37 = None string_literal38 = None EQ39 = None LIT40 = None string_literal41 = None EQ42 = None STRING43 = None v = None list_v = None op_tree = None string_literal29_tree = None EQ30_tree = None STRING31_tree = None string_literal32_tree = None EQ33_tree = None string_literal34_tree = None EQ35_tree = None string_literal36_tree = None EQ37_tree = None string_literal38_tree = None EQ39_tree = None LIT40_tree = None string_literal41_tree = None EQ42_tree = None STRING43_tree = None v_tree = None stream_49 = RewriteRuleTokenStream(self._adaptor, "token 49") stream_48 = RewriteRuleTokenStream(self._adaptor, "token 48") stream_EQ = RewriteRuleTokenStream(self._adaptor, "token EQ") stream_51 = RewriteRuleTokenStream(self._adaptor, "token 51") stream_52 = RewriteRuleTokenStream(self._adaptor, "token 52") stream_53 = RewriteRuleTokenStream(self._adaptor, "token 53") stream_SUBTROP = RewriteRuleTokenStream(self._adaptor, "token SUBTROP") stream_54 = RewriteRuleTokenStream(self._adaptor, "token 54") stream_STRING = RewriteRuleTokenStream(self._adaptor, "token STRING") stream_LIT = RewriteRuleTokenStream(self._adaptor, "token LIT") stream_50 = RewriteRuleTokenStream(self._adaptor, "token 50") try: try: # mexp.g:70:13: ( 'name' EQ STRING -> ^( NAME STRING ) | 'first' EQ (op= SUBTROP )? (v+= 'inf' | v+= LIT ) -> ^( FIRST ( $op)? ( $v)+ ) | 'interval' EQ (v+= 'inf' | v+= LIT ) -> ^( INTERVAL ( $v)+ ) | 'cutoff' EQ (v+= 'inf' | v+= LIT ) -> ^( CUTOFF ( $v)+ ) | 'dimension' EQ LIT -> ^( DIM LIT ) | 'initial' EQ STRING -> ^( INITKER STRING ) ) alt14 = 6 LA14 = self.input.LA(1) if LA14 == 48: alt14 = 1 elif LA14 == 49: alt14 = 2 elif LA14 == 51: alt14 = 3 elif LA14 == 52: alt14 = 4 elif LA14 == 53: alt14 = 5 elif LA14 == 54: alt14 = 6 else: nvae = NoViableAltException("", 14, 0, self.input) raise nvae if alt14 == 1: # mexp.g:70:17: 'name' EQ STRING string_literal29 = self.match( self.input, 48, self.FOLLOW_48_in_meta_term592) stream_48.add(string_literal29) EQ30 = self.match(self.input, EQ, self.FOLLOW_EQ_in_meta_term600) stream_EQ.add(EQ30) STRING31 = self.match(self.input, STRING, self.FOLLOW_STRING_in_meta_term614) stream_STRING.add(STRING31) # AST Rewrite # elements: STRING # token labels: # rule labels: retval # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 70:66: -> ^( NAME STRING ) # mexp.g:70:69: ^( NAME STRING ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(NAME, "NAME"), root_1) self._adaptor.addChild(root_1, stream_STRING.nextNode()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 elif alt14 == 2: # mexp.g:71:17: 'first' EQ (op= SUBTROP )? (v+= 'inf' | v+= LIT ) string_literal32 = self.match( self.input, 49, self.FOLLOW_49_in_meta_term664) stream_49.add(string_literal32) EQ33 = self.match(self.input, EQ, self.FOLLOW_EQ_in_meta_term671) stream_EQ.add(EQ33) # mexp.g:71:35: (op= SUBTROP )? 
alt10 = 2 LA10_0 = self.input.LA(1) if (LA10_0 == SUBTROP): alt10 = 1 if alt10 == 1: # mexp.g:71:35: op= SUBTROP op = self.match(self.input, SUBTROP, self.FOLLOW_SUBTROP_in_meta_term675) stream_SUBTROP.add(op) # mexp.g:71:45: (v+= 'inf' | v+= LIT ) alt11 = 2 LA11_0 = self.input.LA(1) if (LA11_0 == 50): alt11 = 1 elif (LA11_0 == LIT): alt11 = 2 else: nvae = NoViableAltException("", 11, 0, self.input) raise nvae if alt11 == 1: # mexp.g:71:46: v+= 'inf' v = self.match(self.input, 50, self.FOLLOW_50_in_meta_term681) stream_50.add(v) if list_v is None: list_v = [] list_v.append(v) elif alt11 == 2: # mexp.g:71:57: v+= LIT v = self.match(self.input, LIT, self.FOLLOW_LIT_in_meta_term687) stream_LIT.add(v) if list_v is None: list_v = [] list_v.append(v) # AST Rewrite # elements: v, op # token labels: op # rule labels: retval # token list labels: v # rule list labels: # wildcard labels: retval.tree = root_0 stream_op = RewriteRuleTokenStream(self._adaptor, "token op", op) stream_v = RewriteRuleTokenStream(self._adaptor, "token v", list_v) if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 71:66: -> ^( FIRST ( $op)? ( $v)+ ) # mexp.g:71:69: ^( FIRST ( $op)? ( $v)+ ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(FIRST, "FIRST"), root_1) # mexp.g:71:80: ( $op)? if stream_op.hasNext(): self._adaptor.addChild(root_1, stream_op.nextNode()) stream_op.reset() # mexp.g:71:85: ( $v)+ if not (stream_v.hasNext()): raise RewriteEarlyExitException() while stream_v.hasNext(): self._adaptor.addChild(root_1, stream_v.nextNode()) stream_v.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 elif alt14 == 3: # mexp.g:72:17: 'interval' EQ (v+= 'inf' | v+= LIT ) string_literal34 = self.match( self.input, 51, self.FOLLOW_51_in_meta_term724) stream_51.add(string_literal34) EQ35 = self.match(self.input, EQ, self.FOLLOW_EQ_in_meta_term728) stream_EQ.add(EQ35) # mexp.g:72:45: (v+= 'inf' | v+= LIT ) alt12 = 2 LA12_0 = self.input.LA(1) if (LA12_0 == 50): alt12 = 1 elif (LA12_0 == LIT): alt12 = 2 else: nvae = NoViableAltException("", 12, 0, self.input) raise nvae if alt12 == 1: # mexp.g:72:46: v+= 'inf' v = self.match(self.input, 50, self.FOLLOW_50_in_meta_term745) stream_50.add(v) if list_v is None: list_v = [] list_v.append(v) elif alt12 == 2: # mexp.g:72:57: v+= LIT v = self.match(self.input, LIT, self.FOLLOW_LIT_in_meta_term751) stream_LIT.add(v) if list_v is None: list_v = [] list_v.append(v) # AST Rewrite # elements: v # token labels: # rule labels: retval # token list labels: v # rule list labels: # wildcard labels: retval.tree = root_0 stream_v = RewriteRuleTokenStream(self._adaptor, "token v", list_v) if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 72:66: -> ^( INTERVAL ( $v)+ ) # mexp.g:72:69: ^( INTERVAL ( $v)+ ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(INTERVAL, "INTERVAL"), root_1) # mexp.g:72:85: ( $v)+ if not (stream_v.hasNext()): raise RewriteEarlyExitException() while stream_v.hasNext(): self._adaptor.addChild(root_1, stream_v.nextNode()) stream_v.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 elif alt14 == 4: # mexp.g:73:17: 'cutoff' 
EQ (v+= 'inf' | v+= LIT ) string_literal36 = self.match( self.input, 52, self.FOLLOW_52_in_meta_term786) stream_52.add(string_literal36) EQ37 = self.match(self.input, EQ, self.FOLLOW_EQ_in_meta_term792) stream_EQ.add(EQ37) # mexp.g:73:45: (v+= 'inf' | v+= LIT ) alt13 = 2 LA13_0 = self.input.LA(1) if (LA13_0 == 50): alt13 = 1 elif (LA13_0 == LIT): alt13 = 2 else: nvae = NoViableAltException("", 13, 0, self.input) raise nvae if alt13 == 1: # mexp.g:73:46: v+= 'inf' v = self.match(self.input, 50, self.FOLLOW_50_in_meta_term809) stream_50.add(v) if list_v is None: list_v = [] list_v.append(v) elif alt13 == 2: # mexp.g:73:57: v+= LIT v = self.match(self.input, LIT, self.FOLLOW_LIT_in_meta_term815) stream_LIT.add(v) if list_v is None: list_v = [] list_v.append(v) # AST Rewrite # elements: v # token labels: # rule labels: retval # token list labels: v # rule list labels: # wildcard labels: retval.tree = root_0 stream_v = RewriteRuleTokenStream(self._adaptor, "token v", list_v) if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 73:66: -> ^( CUTOFF ( $v)+ ) # mexp.g:73:69: ^( CUTOFF ( $v)+ ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(CUTOFF, "CUTOFF"), root_1) # mexp.g:73:85: ( $v)+ if not (stream_v.hasNext()): raise RewriteEarlyExitException() while stream_v.hasNext(): self._adaptor.addChild(root_1, stream_v.nextNode()) stream_v.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 elif alt14 == 5: # mexp.g:74:17: 'dimension' EQ LIT string_literal38 = self.match( self.input, 53, self.FOLLOW_53_in_meta_term852) stream_53.add(string_literal38) EQ39 = self.match(self.input, EQ, self.FOLLOW_EQ_in_meta_term855) stream_EQ.add(EQ39) LIT40 = self.match(self.input, LIT, self.FOLLOW_LIT_in_meta_term869) stream_LIT.add(LIT40) # AST Rewrite # elements: LIT # token labels: # rule labels: retval # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 74:66: -> ^( DIM LIT ) # mexp.g:74:69: ^( DIM LIT ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(DIM, "DIM"), root_1) self._adaptor.addChild(root_1, stream_LIT.nextNode()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 elif alt14 == 6: # mexp.g:75:17: 'initial' EQ STRING string_literal41 = self.match( self.input, 54, self.FOLLOW_54_in_meta_term922) stream_54.add(string_literal41) EQ42 = self.match(self.input, EQ, self.FOLLOW_EQ_in_meta_term927) stream_EQ.add(EQ42) STRING43 = self.match(self.input, STRING, self.FOLLOW_STRING_in_meta_term941) stream_STRING.add(STRING43) # AST Rewrite # elements: STRING # token labels: # rule labels: retval # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 75:66: -> ^( INITKER STRING ) # mexp.g:75:69: ^( INITKER STRING ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(INITKER, "INITKER"), root_1) 
self._adaptor.addChild(root_1, stream_STRING.nextNode()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "meta_term"

    class block_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.block_return, self).__init__()
            self.tree = None

    # $ANTLR start "block"
    # mexp.g:78:1: block : a= exprOrBind ( SEMI b+= exprOrBind )* ( SEMI )? -> ^( BLOCK $a ( $b)* ) ;
[docs] def block(self,): retval = self.block_return() retval.start = self.input.LT(1) root_0 = None SEMI44 = None SEMI45 = None list_b = None a = None b = None b = None SEMI44_tree = None SEMI45_tree = None stream_SEMI = RewriteRuleTokenStream(self._adaptor, "token SEMI") stream_exprOrBind = RewriteRuleSubtreeStream(self._adaptor, "rule exprOrBind") try: try: # mexp.g:78:7: (a= exprOrBind ( SEMI b+= exprOrBind )* ( SEMI )? -> ^( BLOCK $a ( $b)* ) ) # mexp.g:78:9: a= exprOrBind ( SEMI b+= exprOrBind )* ( SEMI )? self._state.following.append(self.FOLLOW_exprOrBind_in_block980) a = self.exprOrBind() self._state.following.pop() stream_exprOrBind.add(a.tree) # mexp.g:78:22: ( SEMI b+= exprOrBind )* while True: #loop15 alt15 = 2 alt15 = self.dfa15.predict(self.input) if alt15 == 1: # mexp.g:78:23: SEMI b+= exprOrBind SEMI44 = self.match(self.input, SEMI, self.FOLLOW_SEMI_in_block983) stream_SEMI.add(SEMI44) self._state.following.append( self.FOLLOW_exprOrBind_in_block987) b = self.exprOrBind() self._state.following.pop() stream_exprOrBind.add(b.tree) if list_b is None: list_b = [] list_b.append(b.tree) else: break #loop15 # mexp.g:78:44: ( SEMI )? alt16 = 2 LA16_0 = self.input.LA(1) if (LA16_0 == SEMI): alt16 = 1 if alt16 == 1: # mexp.g:78:44: SEMI SEMI45 = self.match(self.input, SEMI, self.FOLLOW_SEMI_in_block991) stream_SEMI.add(SEMI45) # AST Rewrite # elements: b, a # token labels: # rule labels: retval, a # token list labels: # rule list labels: b # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) if a is not None: stream_a = RewriteRuleSubtreeStream(self._adaptor, "rule a", a.tree) else: stream_a = RewriteRuleSubtreeStream(self._adaptor, "token a", None) stream_b = RewriteRuleSubtreeStream(self._adaptor, "token b", list_b) root_0 = self._adaptor.nil() # 78:50: -> ^( BLOCK $a ( $b)* ) # mexp.g:78:53: ^( BLOCK $a ( $b)* ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(BLOCK, "BLOCK"), root_1) self._adaptor.addChild(root_1, stream_a.nextTree()) # mexp.g:78:64: ( $b)* while stream_b.hasNext(): self._adaptor.addChild(root_1, stream_b.nextTree()) stream_b.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "block"

    class exprOrBind_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.exprOrBind_return, self).__init__()
            self.tree = None

    # $ANTLR start "exprOrBind"
    # mexp.g:79:1: exprOrBind : ( expr | bind );
    def exprOrBind(self, ):
        retval = self.exprOrBind_return()
        retval.start = self.input.LT(1)

        root_0 = None

        expr46 = None
        bind47 = None

        try:
            try:
                # mexp.g:79:12: ( expr | bind )
                alt17 = 2
                alt17 = self.dfa17.predict(self.input)
                if alt17 == 1:
                    # mexp.g:79:14: expr
                    root_0 = self._adaptor.nil()

                    self._state.following.append(self.FOLLOW_expr_in_exprOrBind1013)
                    expr46 = self.expr()
                    self._state.following.pop()
                    self._adaptor.addChild(root_0, expr46.tree)

                elif alt17 == 2:
                    # mexp.g:79:21: bind
                    root_0 = self._adaptor.nil()

                    self._state.following.append(self.FOLLOW_bind_in_exprOrBind1017)
                    bind47 = self.bind()
                    self._state.following.pop()
                    self._adaptor.addChild(root_0, bind47.tree)

                retval.stop = self.input.LT(-1)

                retval.tree = self._adaptor.rulePostProcessing(root_0)
                self._adaptor.setTokenBoundaries(retval.tree, retval.start,
                                                 retval.stop)

            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
                retval.tree = self._adaptor.errorNode(self.input, retval.start,
                                                      self.input.LT(-1), re)

        finally:
            pass

        return retval
# $ANTLR end "exprOrBind"

    class bind_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.bind_return, self).__init__()
            self.tree = None

    # $ANTLR start "bind"
    # mexp.g:81:1: bind : nm= IDENT EQ val= expr -> ^( BIND ^( VAR $nm) $val) ;
[docs] def bind(self,): retval = self.bind_return() retval.start = self.input.LT(1) root_0 = None nm = None EQ48 = None val = None nm_tree = None EQ48_tree = None stream_IDENT = RewriteRuleTokenStream(self._adaptor, "token IDENT") stream_EQ = RewriteRuleTokenStream(self._adaptor, "token EQ") stream_expr = RewriteRuleSubtreeStream(self._adaptor, "rule expr") try: try: # mexp.g:81:6: (nm= IDENT EQ val= expr -> ^( BIND ^( VAR $nm) $val) ) # mexp.g:81:8: nm= IDENT EQ val= expr nm = self.match(self.input, IDENT, self.FOLLOW_IDENT_in_bind1027) stream_IDENT.add(nm) EQ48 = self.match(self.input, EQ, self.FOLLOW_EQ_in_bind1029) stream_EQ.add(EQ48) self._state.following.append(self.FOLLOW_expr_in_bind1033) val = self.expr() self._state.following.pop() stream_expr.add(val.tree) # AST Rewrite # elements: val, nm # token labels: nm # rule labels: val, retval # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 stream_nm = RewriteRuleTokenStream(self._adaptor, "token nm", nm) if val is not None: stream_val = RewriteRuleSubtreeStream( self._adaptor, "rule val", val.tree) else: stream_val = RewriteRuleSubtreeStream( self._adaptor, "token val", None) if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 81:29: -> ^( BIND ^( VAR $nm) $val) # mexp.g:81:32: ^( BIND ^( VAR $nm) $val) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(BIND, "BIND"), root_1) # mexp.g:81:39: ^( VAR $nm) root_2 = self._adaptor.nil() root_2 = self._adaptor.becomeRoot( self._adaptor.createFromType(VAR, "VAR"), root_2) self._adaptor.addChild(root_2, stream_nm.nextNode()) self._adaptor.addChild(root_1, root_2) self._adaptor.addChild(root_1, stream_val.nextTree()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "bind"

    class expr_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.expr_return, self).__init__()
            self.tree = None

    # $ANTLR start "expr"
    # mexp.g:83:1: expr : factor ( ( ADDOP | SUBTROP ) factor )* ;
[docs] def expr(self,): retval = self.expr_return() retval.start = self.input.LT(1) root_0 = None set50 = None factor49 = None factor51 = None set50_tree = None try: try: # mexp.g:83:6: ( factor ( ( ADDOP | SUBTROP ) factor )* ) # mexp.g:83:8: factor ( ( ADDOP | SUBTROP ) factor )* root_0 = self._adaptor.nil() self._state.following.append(self.FOLLOW_factor_in_expr1057) factor49 = self.factor() self._state.following.pop() self._adaptor.addChild(root_0, factor49.tree) # mexp.g:83:15: ( ( ADDOP | SUBTROP ) factor )* while True: #loop18 alt18 = 2 alt18 = self.dfa18.predict(self.input) if alt18 == 1: # mexp.g:83:17: ( ADDOP | SUBTROP ) factor set50 = self.input.LT(1) set50 = self.input.LT(1) if (SUBTROP <= self.input.LA(1) <= ADDOP): self.input.consume() root_0 = self._adaptor.becomeRoot( self._adaptor.createWithPayload(set50), root_0) self._state.errorRecovery = False else: mse = MismatchedSetException(None, self.input) raise mse self._state.following.append( self.FOLLOW_factor_in_expr1070) factor51 = self.factor() self._state.following.pop() self._adaptor.addChild(root_0, factor51.tree) else: break #loop18 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "expr"

    class factor_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.factor_return, self).__init__()
            self.tree = None

    # $ANTLR start "factor"
    # mexp.g:84:1: factor : signedExpComp ( ( MULTOP | DIVOP ) signedExpComp )* ;
[docs] def factor(self,): retval = self.factor_return() retval.start = self.input.LT(1) root_0 = None set53 = None signedExpComp52 = None signedExpComp54 = None set53_tree = None try: try: # mexp.g:84:8: ( signedExpComp ( ( MULTOP | DIVOP ) signedExpComp )* ) # mexp.g:84:10: signedExpComp ( ( MULTOP | DIVOP ) signedExpComp )* root_0 = self._adaptor.nil() self._state.following.append( self.FOLLOW_signedExpComp_in_factor1081) signedExpComp52 = self.signedExpComp() self._state.following.pop() self._adaptor.addChild(root_0, signedExpComp52.tree) # mexp.g:84:25: ( ( MULTOP | DIVOP ) signedExpComp )* while True: #loop19 alt19 = 2 alt19 = self.dfa19.predict(self.input) if alt19 == 1: # mexp.g:84:27: ( MULTOP | DIVOP ) signedExpComp set53 = self.input.LT(1) set53 = self.input.LT(1) if (MULTOP <= self.input.LA(1) <= DIVOP): self.input.consume() root_0 = self._adaptor.becomeRoot( self._adaptor.createWithPayload(set53), root_0) self._state.errorRecovery = False else: mse = MismatchedSetException(None, self.input) raise mse self._state.following.append( self.FOLLOW_signedExpComp_in_factor1096) signedExpComp54 = self.signedExpComp() self._state.following.pop() self._adaptor.addChild(root_0, signedExpComp54.tree) else: break #loop19 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "factor"

    class signedExpComp_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.signedExpComp_return, self).__init__()
            self.tree = None

    # $ANTLR start "signedExpComp"
    # mexp.g:87:1: signedExpComp : ( ADDOP | SUBTROP )? expComp ;
[docs] def signedExpComp(self,): retval = self.signedExpComp_return() retval.start = self.input.LT(1) root_0 = None ADDOP55 = None SUBTROP56 = None expComp57 = None ADDOP55_tree = None SUBTROP56_tree = None try: try: # mexp.g:87:16: ( ( ADDOP | SUBTROP )? expComp ) # mexp.g:87:18: ( ADDOP | SUBTROP )? expComp root_0 = self._adaptor.nil() # mexp.g:87:18: ( ADDOP | SUBTROP )? alt20 = 3 alt20 = self.dfa20.predict(self.input) if alt20 == 1: # mexp.g:87:19: ADDOP ADDOP55 = self.match(self.input, ADDOP, self.FOLLOW_ADDOP_in_signedExpComp1111) ADDOP55_tree = self._adaptor.createWithPayload(ADDOP55) root_0 = self._adaptor.becomeRoot(ADDOP55_tree, root_0) elif alt20 == 2: # mexp.g:87:28: SUBTROP SUBTROP56 = self.match( self.input, SUBTROP, self.FOLLOW_SUBTROP_in_signedExpComp1116) SUBTROP56_tree = self._adaptor.createWithPayload(SUBTROP56) root_0 = self._adaptor.becomeRoot(SUBTROP56_tree, root_0) self._state.following.append( self.FOLLOW_expComp_in_signedExpComp1121) expComp57 = self.expComp() self._state.following.pop() self._adaptor.addChild(root_0, expComp57.tree) retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "signedExpComp"

    class expComp_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.expComp_return, self).__init__()
            self.tree = None

    # $ANTLR start "expComp"
    # mexp.g:88:1: expComp : comp ( EXP signedExpComp )? ;
[docs] def expComp(self,): retval = self.expComp_return() retval.start = self.input.LT(1) root_0 = None EXP59 = None comp58 = None signedExpComp60 = None EXP59_tree = None try: try: # mexp.g:88:9: ( comp ( EXP signedExpComp )? ) # mexp.g:88:11: comp ( EXP signedExpComp )? root_0 = self._adaptor.nil() self._state.following.append(self.FOLLOW_comp_in_expComp1129) comp58 = self.comp() self._state.following.pop() self._adaptor.addChild(root_0, comp58.tree) # mexp.g:88:16: ( EXP signedExpComp )? alt21 = 2 alt21 = self.dfa21.predict(self.input) if alt21 == 1: # mexp.g:88:17: EXP signedExpComp EXP59 = self.match(self.input, EXP, self.FOLLOW_EXP_in_expComp1132) EXP59_tree = self._adaptor.createWithPayload(EXP59) root_0 = self._adaptor.becomeRoot(EXP59_tree, root_0) self._state.following.append( self.FOLLOW_signedExpComp_in_expComp1135) signedExpComp60 = self.signedExpComp() self._state.following.pop() self._adaptor.addChild(root_0, signedExpComp60.tree) retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "expComp"

    class comp_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.comp_return, self).__init__()
            self.tree = None

    # $ANTLR start "comp"
    # mexp.g:90:1: comp : a= atom ( BOPEN b+= expr BCLOSE )* -> ^( ELEM $a ( $b)* ) ;
[docs] def comp(self,): retval = self.comp_return() retval.start = self.input.LT(1) root_0 = None BOPEN61 = None BCLOSE62 = None list_b = None a = None b = None b = None BOPEN61_tree = None BCLOSE62_tree = None stream_BOPEN = RewriteRuleTokenStream(self._adaptor, "token BOPEN") stream_BCLOSE = RewriteRuleTokenStream(self._adaptor, "token BCLOSE") stream_atom = RewriteRuleSubtreeStream(self._adaptor, "rule atom") stream_expr = RewriteRuleSubtreeStream(self._adaptor, "rule expr") try: try: # mexp.g:90:6: (a= atom ( BOPEN b+= expr BCLOSE )* -> ^( ELEM $a ( $b)* ) ) # mexp.g:90:8: a= atom ( BOPEN b+= expr BCLOSE )* self._state.following.append(self.FOLLOW_atom_in_comp1149) a = self.atom() self._state.following.pop() stream_atom.add(a.tree) # mexp.g:90:15: ( BOPEN b+= expr BCLOSE )* while True: #loop22 alt22 = 2 alt22 = self.dfa22.predict(self.input) if alt22 == 1: # mexp.g:90:16: BOPEN b+= expr BCLOSE BOPEN61 = self.match(self.input, BOPEN, self.FOLLOW_BOPEN_in_comp1152) stream_BOPEN.add(BOPEN61) self._state.following.append( self.FOLLOW_expr_in_comp1156) b = self.expr() self._state.following.pop() stream_expr.add(b.tree) if list_b is None: list_b = [] list_b.append(b.tree) BCLOSE62 = self.match(self.input, BCLOSE, self.FOLLOW_BCLOSE_in_comp1158) stream_BCLOSE.add(BCLOSE62) else: break #loop22 # AST Rewrite # elements: b, a # token labels: # rule labels: retval, a # token list labels: # rule list labels: b # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) if a is not None: stream_a = RewriteRuleSubtreeStream(self._adaptor, "rule a", a.tree) else: stream_a = RewriteRuleSubtreeStream(self._adaptor, "token a", None) stream_b = RewriteRuleSubtreeStream(self._adaptor, "token b", list_b) root_0 = self._adaptor.nil() # 90:41: -> ^( ELEM $a ( $b)* ) # mexp.g:90:44: ^( ELEM $a ( $b)* ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(ELEM, "ELEM"), root_1) self._adaptor.addChild(root_1, stream_a.nextTree()) # mexp.g:90:54: ( $b)* while stream_b.hasNext(): self._adaptor.addChild(root_1, stream_b.nextTree()) stream_b.reset() self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "comp"
    class fcnCall_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.fcnCall_return, self).__init__()
            self.tree = None
# $ANTLR start "fcnCall" # mexp.g:91:1: fcnCall : IDENT POPEN ( expr ( COMMA expr )* )? PCLOSE ;
[docs] def fcnCall(self,): retval = self.fcnCall_return() retval.start = self.input.LT(1) root_0 = None IDENT63 = None POPEN64 = None COMMA66 = None PCLOSE68 = None expr65 = None expr67 = None IDENT63_tree = None POPEN64_tree = None COMMA66_tree = None PCLOSE68_tree = None try: try: # mexp.g:91:9: ( IDENT POPEN ( expr ( COMMA expr )* )? PCLOSE ) # mexp.g:91:11: IDENT POPEN ( expr ( COMMA expr )* )? PCLOSE root_0 = self._adaptor.nil() IDENT63 = self.match(self.input, IDENT, self.FOLLOW_IDENT_in_fcnCall1182) IDENT63_tree = self._adaptor.createWithPayload(IDENT63) root_0 = self._adaptor.becomeRoot(IDENT63_tree, root_0) POPEN64 = self.match(self.input, POPEN, self.FOLLOW_POPEN_in_fcnCall1185) # mexp.g:91:25: ( expr ( COMMA expr )* )? alt24 = 2 alt24 = self.dfa24.predict(self.input) if alt24 == 1: # mexp.g:91:26: expr ( COMMA expr )* self._state.following.append( self.FOLLOW_expr_in_fcnCall1189) expr65 = self.expr() self._state.following.pop() self._adaptor.addChild(root_0, expr65.tree) # mexp.g:91:31: ( COMMA expr )* while True: #loop23 alt23 = 2 LA23_0 = self.input.LA(1) if (LA23_0 == COMMA): alt23 = 1 if alt23 == 1: # mexp.g:91:32: COMMA expr COMMA66 = self.match( self.input, COMMA, self.FOLLOW_COMMA_in_fcnCall1192) self._state.following.append( self.FOLLOW_expr_in_fcnCall1195) expr67 = self.expr() self._state.following.pop() self._adaptor.addChild(root_0, expr67.tree) else: break #loop23 PCLOSE68 = self.match(self.input, PCLOSE, self.FOLLOW_PCLOSE_in_fcnCall1201) retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "fcnCall"
    class atom_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.atom_return, self).__init__()
            self.tree = None
# $ANTLR start "atom" # mexp.g:93:1: atom : ( series | if_ | fcnCall | STRING | IDENT -> ^( VAR IDENT ) | LIT | POPEN expr PCLOSE | COPEN block CCLOSE );
[docs] def atom(self,): retval = self.atom_return() retval.start = self.input.LT(1) root_0 = None STRING72 = None IDENT73 = None LIT74 = None POPEN75 = None PCLOSE77 = None COPEN78 = None CCLOSE80 = None series69 = None if_70 = None fcnCall71 = None expr76 = None block79 = None STRING72_tree = None IDENT73_tree = None LIT74_tree = None POPEN75_tree = None PCLOSE77_tree = None COPEN78_tree = None CCLOSE80_tree = None stream_IDENT = RewriteRuleTokenStream(self._adaptor, "token IDENT") try: try: # mexp.g:93:7: ( series | if_ | fcnCall | STRING | IDENT -> ^( VAR IDENT ) | LIT | POPEN expr PCLOSE | COPEN block CCLOSE ) alt25 = 8 alt25 = self.dfa25.predict(self.input) if alt25 == 1: # mexp.g:93:9: series root_0 = self._adaptor.nil() self._state.following.append(self.FOLLOW_series_in_atom1229) series69 = self.series() self._state.following.pop() self._adaptor.addChild(root_0, series69.tree) elif alt25 == 2: # mexp.g:94:17: if_ root_0 = self._adaptor.nil() self._state.following.append(self.FOLLOW_if__in_atom1248) if_70 = self.if_() self._state.following.pop() self._adaptor.addChild(root_0, if_70.tree) elif alt25 == 3: # mexp.g:95:17: fcnCall root_0 = self._adaptor.nil() self._state.following.append( self.FOLLOW_fcnCall_in_atom1267) fcnCall71 = self.fcnCall() self._state.following.pop() self._adaptor.addChild(root_0, fcnCall71.tree) elif alt25 == 4: # mexp.g:96:17: STRING root_0 = self._adaptor.nil() STRING72 = self.match(self.input, STRING, self.FOLLOW_STRING_in_atom1286) STRING72_tree = self._adaptor.createWithPayload(STRING72) self._adaptor.addChild(root_0, STRING72_tree) elif alt25 == 5: # mexp.g:97:17: IDENT IDENT73 = self.match(self.input, IDENT, self.FOLLOW_IDENT_in_atom1304) stream_IDENT.add(IDENT73) # AST Rewrite # elements: IDENT # token labels: # rule labels: retval # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) root_0 = self._adaptor.nil() # 97:23: -> ^( VAR IDENT ) # mexp.g:97:26: ^( VAR IDENT ) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(VAR, "VAR"), root_1) self._adaptor.addChild(root_1, stream_IDENT.nextNode()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 elif alt25 == 6: # mexp.g:98:17: LIT root_0 = self._adaptor.nil() LIT74 = self.match(self.input, LIT, self.FOLLOW_LIT_in_atom1332) LIT74_tree = self._adaptor.createWithPayload(LIT74) self._adaptor.addChild(root_0, LIT74_tree) elif alt25 == 7: # mexp.g:99:17: POPEN expr PCLOSE root_0 = self._adaptor.nil() POPEN75 = self.match(self.input, POPEN, self.FOLLOW_POPEN_in_atom1351) self._state.following.append(self.FOLLOW_expr_in_atom1354) expr76 = self.expr() self._state.following.pop() self._adaptor.addChild(root_0, expr76.tree) PCLOSE77 = self.match(self.input, PCLOSE, self.FOLLOW_PCLOSE_in_atom1356) elif alt25 == 8: # mexp.g:100:17: COPEN block CCLOSE root_0 = self._adaptor.nil() COPEN78 = self.match(self.input, COPEN, self.FOLLOW_COPEN_in_atom1376) self._state.following.append(self.FOLLOW_block_in_atom1379) block79 = self.block() self._state.following.pop() self._adaptor.addChild(root_0, block79.tree) CCLOSE80 = self.match(self.input, CCLOSE, self.FOLLOW_CCLOSE_in_atom1381) retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except 
RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "atom"
    class series_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.series_return, self).__init__()
            self.tree = None
# $ANTLR start "series" # mexp.g:102:1: series : 'series' POPEN a= iter ( COMMA b+= iter )* PCLOSE e= expr -> ^( SERIES $a ( $b)* $e) ;
[docs] def series(self,): retval = self.series_return() retval.start = self.input.LT(1) root_0 = None string_literal81 = None POPEN82 = None COMMA83 = None PCLOSE84 = None list_b = None a = None e = None b = None b = None string_literal81_tree = None POPEN82_tree = None COMMA83_tree = None PCLOSE84_tree = None stream_PCLOSE = RewriteRuleTokenStream(self._adaptor, "token PCLOSE") stream_55 = RewriteRuleTokenStream(self._adaptor, "token 55") stream_POPEN = RewriteRuleTokenStream(self._adaptor, "token POPEN") stream_COMMA = RewriteRuleTokenStream(self._adaptor, "token COMMA") stream_expr = RewriteRuleSubtreeStream(self._adaptor, "rule expr") stream_iter = RewriteRuleSubtreeStream(self._adaptor, "rule iter") try: try: # mexp.g:102:8: ( 'series' POPEN a= iter ( COMMA b+= iter )* PCLOSE e= expr -> ^( SERIES $a ( $b)* $e) ) # mexp.g:102:10: 'series' POPEN a= iter ( COMMA b+= iter )* PCLOSE e= expr string_literal81 = self.match(self.input, 55, self.FOLLOW_55_in_series1392) stream_55.add(string_literal81) POPEN82 = self.match(self.input, POPEN, self.FOLLOW_POPEN_in_series1394) stream_POPEN.add(POPEN82) self._state.following.append(self.FOLLOW_iter_in_series1398) a = self.iter() self._state.following.pop() stream_iter.add(a.tree) # mexp.g:102:32: ( COMMA b+= iter )* while True: #loop26 alt26 = 2 LA26_0 = self.input.LA(1) if (LA26_0 == COMMA): alt26 = 1 if alt26 == 1: # mexp.g:102:33: COMMA b+= iter COMMA83 = self.match(self.input, COMMA, self.FOLLOW_COMMA_in_series1401) stream_COMMA.add(COMMA83) self._state.following.append( self.FOLLOW_iter_in_series1405) b = self.iter() self._state.following.pop() stream_iter.add(b.tree) if list_b is None: list_b = [] list_b.append(b.tree) else: break #loop26 PCLOSE84 = self.match(self.input, PCLOSE, self.FOLLOW_PCLOSE_in_series1409) stream_PCLOSE.add(PCLOSE84) self._state.following.append(self.FOLLOW_expr_in_series1413) e = self.expr() self._state.following.pop() stream_expr.add(e.tree) # AST Rewrite # elements: e, a, b # token labels: # rule labels: retval, e, a # token list labels: # rule list labels: b # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) if e is not None: stream_e = RewriteRuleSubtreeStream(self._adaptor, "rule e", e.tree) else: stream_e = RewriteRuleSubtreeStream(self._adaptor, "token e", None) if a is not None: stream_a = RewriteRuleSubtreeStream(self._adaptor, "rule a", a.tree) else: stream_a = RewriteRuleSubtreeStream(self._adaptor, "token a", None) stream_b = RewriteRuleSubtreeStream(self._adaptor, "token b", list_b) root_0 = self._adaptor.nil() # 102:63: -> ^( SERIES $a ( $b)* $e) # mexp.g:102:66: ^( SERIES $a ( $b)* $e) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(SERIES, "SERIES"), root_1) self._adaptor.addChild(root_1, stream_a.nextTree()) # mexp.g:102:78: ( $b)* while stream_b.hasNext(): self._adaptor.addChild(root_1, stream_b.nextTree()) stream_b.reset() self._adaptor.addChild(root_1, stream_e.nextTree()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass 
return retval
# $ANTLR end "series"
    class if__return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.if__return, self).__init__()
            self.tree = None
# $ANTLR start "if_" # mexp.g:103:1: if_ : 'if' c= expr 'then' t= expr 'else' e= expr -> ^( IF $c $t $e) ;
[docs] def if_(self,): retval = self.if__return() retval.start = self.input.LT(1) root_0 = None string_literal85 = None string_literal86 = None string_literal87 = None c = None t = None e = None string_literal85_tree = None string_literal86_tree = None string_literal87_tree = None stream_58 = RewriteRuleTokenStream(self._adaptor, "token 58") stream_57 = RewriteRuleTokenStream(self._adaptor, "token 57") stream_56 = RewriteRuleTokenStream(self._adaptor, "token 56") stream_expr = RewriteRuleSubtreeStream(self._adaptor, "rule expr") try: try: # mexp.g:103:5: ( 'if' c= expr 'then' t= expr 'else' e= expr -> ^( IF $c $t $e) ) # mexp.g:103:7: 'if' c= expr 'then' t= expr 'else' e= expr string_literal85 = self.match(self.input, 56, self.FOLLOW_56_in_if_1436) stream_56.add(string_literal85) self._state.following.append(self.FOLLOW_expr_in_if_1440) c = self.expr() self._state.following.pop() stream_expr.add(c.tree) string_literal86 = self.match(self.input, 57, self.FOLLOW_57_in_if_1442) stream_57.add(string_literal86) self._state.following.append(self.FOLLOW_expr_in_if_1446) t = self.expr() self._state.following.pop() stream_expr.add(t.tree) string_literal87 = self.match(self.input, 58, self.FOLLOW_58_in_if_1448) stream_58.add(string_literal87) self._state.following.append(self.FOLLOW_expr_in_if_1452) e = self.expr() self._state.following.pop() stream_expr.add(e.tree) # AST Rewrite # elements: c, t, e # token labels: # rule labels: retval, e, t, c # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) if e is not None: stream_e = RewriteRuleSubtreeStream(self._adaptor, "rule e", e.tree) else: stream_e = RewriteRuleSubtreeStream(self._adaptor, "token e", None) if t is not None: stream_t = RewriteRuleSubtreeStream(self._adaptor, "rule t", t.tree) else: stream_t = RewriteRuleSubtreeStream(self._adaptor, "token t", None) if c is not None: stream_c = RewriteRuleSubtreeStream(self._adaptor, "rule c", c.tree) else: stream_c = RewriteRuleSubtreeStream(self._adaptor, "token c", None) root_0 = self._adaptor.nil() # 103:47: -> ^( IF $c $t $e) # mexp.g:103:50: ^( IF $c $t $e) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(IF, "IF"), root_1) self._adaptor.addChild(root_1, stream_c.nextTree()) self._adaptor.addChild(root_1, stream_t.nextTree()) self._adaptor.addChild(root_1, stream_e.nextTree()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "if_"
    class iter_return(ParserRuleReturnScope):

        def __init__(self):
            super(mexpParser.iter_return, self).__init__()
            self.tree = None
# $ANTLR start "iter" # mexp.g:104:1: iter : nm= IDENT EQ lb= expr COLON ub= expr -> ^( ITER ^( VAR $nm) $lb $ub) ;
[docs] def iter(self,): retval = self.iter_return() retval.start = self.input.LT(1) root_0 = None nm = None EQ88 = None COLON89 = None lb = None ub = None nm_tree = None EQ88_tree = None COLON89_tree = None stream_COLON = RewriteRuleTokenStream(self._adaptor, "token COLON") stream_IDENT = RewriteRuleTokenStream(self._adaptor, "token IDENT") stream_EQ = RewriteRuleTokenStream(self._adaptor, "token EQ") stream_expr = RewriteRuleSubtreeStream(self._adaptor, "rule expr") try: try: # mexp.g:104:6: (nm= IDENT EQ lb= expr COLON ub= expr -> ^( ITER ^( VAR $nm) $lb $ub) ) # mexp.g:104:8: nm= IDENT EQ lb= expr COLON ub= expr nm = self.match(self.input, IDENT, self.FOLLOW_IDENT_in_iter1476) stream_IDENT.add(nm) EQ88 = self.match(self.input, EQ, self.FOLLOW_EQ_in_iter1478) stream_EQ.add(EQ88) self._state.following.append(self.FOLLOW_expr_in_iter1482) lb = self.expr() self._state.following.pop() stream_expr.add(lb.tree) COLON89 = self.match(self.input, COLON, self.FOLLOW_COLON_in_iter1484) stream_COLON.add(COLON89) self._state.following.append(self.FOLLOW_expr_in_iter1488) ub = self.expr() self._state.following.pop() stream_expr.add(ub.tree) # AST Rewrite # elements: ub, lb, nm # token labels: nm # rule labels: retval, ub, lb # token list labels: # rule list labels: # wildcard labels: retval.tree = root_0 stream_nm = RewriteRuleTokenStream(self._adaptor, "token nm", nm) if retval is not None: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "rule retval", retval.tree) else: stream_retval = RewriteRuleSubtreeStream( self._adaptor, "token retval", None) if ub is not None: stream_ub = RewriteRuleSubtreeStream( self._adaptor, "rule ub", ub.tree) else: stream_ub = RewriteRuleSubtreeStream( self._adaptor, "token ub", None) if lb is not None: stream_lb = RewriteRuleSubtreeStream( self._adaptor, "rule lb", lb.tree) else: stream_lb = RewriteRuleSubtreeStream( self._adaptor, "token lb", None) root_0 = self._adaptor.nil() # 104:42: -> ^( ITER ^( VAR $nm) $lb $ub) # mexp.g:104:45: ^( ITER ^( VAR $nm) $lb $ub) root_1 = self._adaptor.nil() root_1 = self._adaptor.becomeRoot( self._adaptor.createFromType(ITER, "ITER"), root_1) # mexp.g:104:52: ^( VAR $nm) root_2 = self._adaptor.nil() root_2 = self._adaptor.becomeRoot( self._adaptor.createFromType(VAR, "VAR"), root_2) self._adaptor.addChild(root_2, stream_nm.nextNode()) self._adaptor.addChild(root_1, root_2) self._adaptor.addChild(root_1, stream_lb.nextTree()) self._adaptor.addChild(root_1, stream_ub.nextTree()) self._adaptor.addChild(root_0, root_1) retval.tree = root_0 retval.stop = self.input.LT(-1) retval.tree = self._adaptor.rulePostProcessing(root_0) self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop) except RecognitionException as re: self.reportError(re) self.recover(self.input, re) retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re) finally: pass return retval
# $ANTLR end "iter" # Delegated rules # lookup tables for DFA #1 DFA1_eot = DFA.unpack(u"\15\uffff") DFA1_eof = DFA.unpack(u"\15\uffff") DFA1_min = DFA.unpack(u"\1\30\14\uffff") DFA1_max = DFA.unpack(u"\1\70\14\uffff") DFA1_accept = DFA.unpack(u"\1\uffff\1\2\10\uffff\1\1\2\uffff") DFA1_special = DFA.unpack(u"\15\uffff") DFA1_transition = [ DFA.unpack(u"\3\1\2\uffff\3\1\5\uffff\1\1\7\uffff\3\12\7\uffff\2" u"\1"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #1
    class DFA1(DFA):
        pass
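    # Note (added): the DFA*_eot/_eof/_min/_max/_accept/_special/_transition
    # strings are the serialized lookahead automata for this grammar, decoded
    # by DFA.unpack from the antlr3 runtime.  In the stock runtime that
    # encoding is run-length based (pairs of count and value characters), so a
    # string such as u"\15\uffff" reads as thirteen copies of the "no entry"
    # sentinel; this describes the runtime's convention, not anything defined
    # in this module.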
# lookup tables for DFA #15 DFA15_eot = DFA.unpack(u"\17\uffff") DFA15_eof = DFA.unpack(u"\2\2\15\uffff") DFA15_min = DFA.unpack(u"\1\26\1\30\15\uffff") DFA15_max = DFA.unpack(u"\1\46\1\70\15\uffff") DFA15_accept = DFA.unpack(u"\2\uffff\1\2\1\uffff\1\1\12\uffff") DFA15_special = DFA.unpack(u"\17\uffff") DFA15_transition = [ DFA.unpack(u"\1\1\17\uffff\1\2"), DFA.unpack(u"\3\4\2\uffff\3\4\5\uffff\1\4\1\2\20\uffff\2\4"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #15
    class DFA15(DFA):
        pass
# lookup tables for DFA #17 DFA17_eot = DFA.unpack(u"\23\uffff") DFA17_eof = DFA.unpack(u"\5\uffff\1\1\15\uffff") DFA17_min = DFA.unpack(u"\1\30\4\uffff\1\26\15\uffff") DFA17_max = DFA.unpack(u"\1\70\4\uffff\1\46\15\uffff") DFA17_accept = DFA.unpack(u"\1\uffff\1\1\11\uffff\1\2\7\uffff") DFA17_special = DFA.unpack(u"\23\uffff") DFA17_transition = [ DFA.unpack(u"\1\5\2\1\2\uffff\3\1\5\uffff\1\1\21\uffff\2\1"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"\1\1\2\uffff\1\1\2\uffff\1\13\1\uffff\6\1\2\uffff\1" u"\1"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #17
    class DFA17(DFA):
        pass
# lookup tables for DFA #18 DFA18_eot = DFA.unpack(u"\16\uffff") DFA18_eof = DFA.unpack(u"\1\1\15\uffff") DFA18_min = DFA.unpack(u"\1\26\15\uffff") DFA18_max = DFA.unpack(u"\1\72\15\uffff") DFA18_accept = DFA.unpack(u"\1\uffff\1\2\10\uffff\1\1\3\uffff") DFA18_special = DFA.unpack(u"\16\uffff") DFA18_transition = [ DFA.unpack(u"\2\1\3\uffff\1\1\2\uffff\2\12\5\1\1\uffff\2\1\21\uffff" u"\2\1"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #18
    class DFA18(DFA):
        pass
# lookup tables for DFA #19 DFA19_eot = DFA.unpack(u"\16\uffff") DFA19_eof = DFA.unpack(u"\1\1\15\uffff") DFA19_min = DFA.unpack(u"\1\26\15\uffff") DFA19_max = DFA.unpack(u"\1\72\15\uffff") DFA19_accept = DFA.unpack(u"\1\uffff\1\2\10\uffff\1\1\3\uffff") DFA19_special = DFA.unpack(u"\16\uffff") DFA19_transition = [ DFA.unpack(u"\2\1\3\uffff\1\1\2\uffff\2\1\2\12\3\1\1\uffff\2\1\21" u"\uffff\2\1"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #19
    class DFA19(DFA):
        pass
# lookup tables for DFA #20 DFA20_eot = DFA.unpack(u"\12\uffff") DFA20_eof = DFA.unpack(u"\12\uffff") DFA20_min = DFA.unpack(u"\1\30\11\uffff") DFA20_max = DFA.unpack(u"\1\70\11\uffff") DFA20_accept = DFA.unpack(u"\1\uffff\1\1\1\2\1\3\6\uffff") DFA20_special = DFA.unpack(u"\12\uffff") DFA20_transition = [ DFA.unpack(u"\3\3\2\uffff\1\3\1\2\1\1\5\uffff\1\3\21\uffff\2\3"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #20
    class DFA20(DFA):
        pass
# lookup tables for DFA #21 DFA21_eot = DFA.unpack(u"\16\uffff") DFA21_eof = DFA.unpack(u"\1\2\15\uffff") DFA21_min = DFA.unpack(u"\1\26\15\uffff") DFA21_max = DFA.unpack(u"\1\72\15\uffff") DFA21_accept = DFA.unpack(u"\1\uffff\1\1\1\2\13\uffff") DFA21_special = DFA.unpack(u"\16\uffff") DFA21_transition = [ DFA.unpack(u"\2\2\3\uffff\1\2\2\uffff\4\2\1\1\2\2\1\uffff\2\2\21" u"\uffff\2\2"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #21
    class DFA21(DFA):
        pass
# lookup tables for DFA #22 DFA22_eot = DFA.unpack(u"\16\uffff") DFA22_eof = DFA.unpack(u"\1\1\15\uffff") DFA22_min = DFA.unpack(u"\1\26\15\uffff") DFA22_max = DFA.unpack(u"\1\72\15\uffff") DFA22_accept = DFA.unpack(u"\1\uffff\1\2\10\uffff\1\1\3\uffff") DFA22_special = DFA.unpack(u"\16\uffff") DFA22_transition = [ DFA.unpack(u"\2\1\3\uffff\1\1\2\uffff\5\1\1\12\1\1\1\uffff\2\1\21" u"\uffff\2\1"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #22
    class DFA22(DFA):
        pass
# lookup tables for DFA #24 DFA24_eot = DFA.unpack(u"\13\uffff") DFA24_eof = DFA.unpack(u"\13\uffff") DFA24_min = DFA.unpack(u"\1\30\12\uffff") DFA24_max = DFA.unpack(u"\1\70\12\uffff") DFA24_accept = DFA.unpack(u"\1\uffff\1\1\10\uffff\1\2") DFA24_special = DFA.unpack(u"\13\uffff") DFA24_transition = [ DFA.unpack(u"\3\1\1\12\1\uffff\3\1\5\uffff\1\1\21\uffff\2\1"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #24
    class DFA24(DFA):
        pass
# lookup tables for DFA #25 DFA25_eot = DFA.unpack(u"\26\uffff") DFA25_eof = DFA.unpack(u"\3\uffff\1\11\22\uffff") DFA25_min = DFA.unpack(u"\1\30\2\uffff\1\26\22\uffff") DFA25_max = DFA.unpack(u"\1\70\2\uffff\1\72\22\uffff") DFA25_accept = DFA.unpack( u"\1\uffff\1\1\1\2\1\uffff\1\4\1\6\1\7\1\10\1\3\1\5\14\uffff") DFA25_special = DFA.unpack(u"\26\uffff") DFA25_transition = [ DFA.unpack(u"\1\3\1\6\1\5\2\uffff\1\4\7\uffff\1\7\21\uffff\1\1\1" u"\2"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"\2\11\1\uffff\1\10\1\uffff\1\11\2\uffff\7\11\1\uffff" u"\2\11\21\uffff\2\11"), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u""), DFA.unpack(u"") ] # class definition for DFA #25
    class DFA25(DFA):
        pass
FOLLOW_header_in_prog158 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_block_in_prog160 = frozenset([]) FOLLOW_EOF_in_prog162 = frozenset([1]) FOLLOW_decl_in_header191 = frozenset([22]) FOLLOW_SEMI_in_header193 = frozenset([1, 45, 46, 47]) FOLLOW_decl_meta_in_decl222 = frozenset([1]) FOLLOW_decl_output_in_decl226 = frozenset([1]) FOLLOW_static_in_decl230 = frozenset([1]) FOLLOW_45_in_static244 = frozenset([24]) FOLLOW_varWithType_in_static248 = frozenset([1, 23]) FOLLOW_COMMA_in_static251 = frozenset([24]) FOLLOW_varWithType_in_static255 = frozenset([1, 23]) FOLLOW_IDENT_in_varWithType272 = frozenset([25]) FOLLOW_POPEN_in_varWithType274 = frozenset([26]) FOLLOW_LIT_in_varWithType276 = frozenset([27]) FOLLOW_PCLOSE_in_varWithType278 = frozenset([1]) FOLLOW_46_in_decl_output296 = frozenset([25]) FOLLOW_POPEN_in_decl_output298 = frozenset([48, 49, 51]) FOLLOW_output_term_in_decl_output302 = frozenset([23, 27]) FOLLOW_COMMA_in_decl_output305 = frozenset([48, 49, 51]) FOLLOW_output_term_in_decl_output309 = frozenset([23, 27]) FOLLOW_PCLOSE_in_decl_output313 = frozenset([1]) FOLLOW_47_in_decl_meta349 = frozenset([25]) FOLLOW_POPEN_in_decl_meta351 = frozenset([48, 49, 51, 52, 53, 54]) FOLLOW_meta_term_in_decl_meta355 = frozenset([23, 27]) FOLLOW_COMMA_in_decl_meta358 = frozenset([48, 49, 51, 52, 53, 54]) FOLLOW_meta_term_in_decl_meta362 = frozenset([23, 27]) FOLLOW_PCLOSE_in_decl_meta366 = frozenset([1]) FOLLOW_48_in_output_term404 = frozenset([28]) FOLLOW_EQ_in_output_term412 = frozenset([29]) FOLLOW_STRING_in_output_term426 = frozenset([1]) FOLLOW_49_in_output_term476 = frozenset([28]) FOLLOW_EQ_in_output_term483 = frozenset([26, 30, 50]) FOLLOW_SUBTROP_in_output_term487 = frozenset([26, 50]) FOLLOW_50_in_output_term493 = frozenset([1]) FOLLOW_LIT_in_output_term499 = frozenset([1]) FOLLOW_51_in_output_term536 = frozenset([28]) FOLLOW_EQ_in_output_term540 = frozenset([26, 50]) FOLLOW_50_in_output_term557 = frozenset([1]) FOLLOW_LIT_in_output_term563 = frozenset([1]) FOLLOW_48_in_meta_term592 = frozenset([28]) FOLLOW_EQ_in_meta_term600 = frozenset([29]) FOLLOW_STRING_in_meta_term614 = frozenset([1]) FOLLOW_49_in_meta_term664 = frozenset([28]) FOLLOW_EQ_in_meta_term671 = frozenset([26, 30, 50]) FOLLOW_SUBTROP_in_meta_term675 = frozenset([26, 50]) FOLLOW_50_in_meta_term681 = frozenset([1]) FOLLOW_LIT_in_meta_term687 = frozenset([1]) FOLLOW_51_in_meta_term724 = frozenset([28]) FOLLOW_EQ_in_meta_term728 = frozenset([26, 50]) FOLLOW_50_in_meta_term745 = frozenset([1]) FOLLOW_LIT_in_meta_term751 = frozenset([1]) FOLLOW_52_in_meta_term786 = frozenset([28]) FOLLOW_EQ_in_meta_term792 = frozenset([26, 50]) FOLLOW_50_in_meta_term809 = frozenset([1]) FOLLOW_LIT_in_meta_term815 = frozenset([1]) FOLLOW_53_in_meta_term852 = frozenset([28]) FOLLOW_EQ_in_meta_term855 = frozenset([26]) FOLLOW_LIT_in_meta_term869 = frozenset([1]) FOLLOW_54_in_meta_term922 = frozenset([28]) FOLLOW_EQ_in_meta_term927 = frozenset([29]) FOLLOW_STRING_in_meta_term941 = frozenset([1]) FOLLOW_exprOrBind_in_block980 = frozenset([1, 22]) FOLLOW_SEMI_in_block983 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_exprOrBind_in_block987 = frozenset([1, 22]) FOLLOW_SEMI_in_block991 = frozenset([1]) FOLLOW_expr_in_exprOrBind1013 = frozenset([1]) FOLLOW_bind_in_exprOrBind1017 = frozenset([1]) FOLLOW_IDENT_in_bind1027 = frozenset([28]) FOLLOW_EQ_in_bind1029 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_bind1033 = frozenset([1]) FOLLOW_factor_in_expr1057 = frozenset([1, 30, 31]) FOLLOW_set_in_expr1061 = 
frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_factor_in_expr1070 = frozenset([1, 30, 31]) FOLLOW_signedExpComp_in_factor1081 = frozenset([1, 32, 33]) FOLLOW_set_in_factor1086 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_signedExpComp_in_factor1096 = frozenset([1, 32, 33]) FOLLOW_ADDOP_in_signedExpComp1111 = frozenset( [24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_SUBTROP_in_signedExpComp1116 = frozenset( [24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expComp_in_signedExpComp1121 = frozenset([1]) FOLLOW_comp_in_expComp1129 = frozenset([1, 34]) FOLLOW_EXP_in_expComp1132 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_signedExpComp_in_expComp1135 = frozenset([1]) FOLLOW_atom_in_comp1149 = frozenset([1, 35]) FOLLOW_BOPEN_in_comp1152 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_comp1156 = frozenset([36]) FOLLOW_BCLOSE_in_comp1158 = frozenset([1, 35]) FOLLOW_IDENT_in_fcnCall1182 = frozenset([25]) FOLLOW_POPEN_in_fcnCall1185 = frozenset( [24, 25, 26, 27, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_fcnCall1189 = frozenset([23, 27]) FOLLOW_COMMA_in_fcnCall1192 = frozenset( [24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_fcnCall1195 = frozenset([23, 27]) FOLLOW_PCLOSE_in_fcnCall1201 = frozenset([1]) FOLLOW_series_in_atom1229 = frozenset([1]) FOLLOW_if__in_atom1248 = frozenset([1]) FOLLOW_fcnCall_in_atom1267 = frozenset([1]) FOLLOW_STRING_in_atom1286 = frozenset([1]) FOLLOW_IDENT_in_atom1304 = frozenset([1]) FOLLOW_LIT_in_atom1332 = frozenset([1]) FOLLOW_POPEN_in_atom1351 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_atom1354 = frozenset([27]) FOLLOW_PCLOSE_in_atom1356 = frozenset([1]) FOLLOW_COPEN_in_atom1376 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_block_in_atom1379 = frozenset([38]) FOLLOW_CCLOSE_in_atom1381 = frozenset([1]) FOLLOW_55_in_series1392 = frozenset([25]) FOLLOW_POPEN_in_series1394 = frozenset([24]) FOLLOW_iter_in_series1398 = frozenset([23, 27]) FOLLOW_COMMA_in_series1401 = frozenset([24]) FOLLOW_iter_in_series1405 = frozenset([23, 27]) FOLLOW_PCLOSE_in_series1409 = frozenset( [24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_series1413 = frozenset([1]) FOLLOW_56_in_if_1436 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_if_1440 = frozenset([57]) FOLLOW_57_in_if_1442 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_if_1446 = frozenset([58]) FOLLOW_58_in_if_1448 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_if_1452 = frozenset([1]) FOLLOW_IDENT_in_iter1476 = frozenset([28]) FOLLOW_EQ_in_iter1478 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_iter1482 = frozenset([39]) FOLLOW_COLON_in_iter1484 = frozenset([24, 25, 26, 29, 30, 31, 37, 55, 56]) FOLLOW_expr_in_iter1488 = frozenset([1])
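    # Note (added): each FOLLOW_* frozenset lists the token-type numbers that
    # may follow the referenced element at that point in its rule; the base
    # recognizer consults these sets when matching tokens and when
    # resynchronizing after a RecognitionException.  For example,
    # FOLLOW_EXP_in_expComp1132 is the set of token types that can begin a
    # signedExpComp after an EXP operator.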
def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
    from schrodinger.application.desmond.antlr3.main import ParserMain
    main = ParserMain("mexpLexer", mexpParser)
    main.stdin = stdin
    main.stdout = stdout
    main.stderr = stderr
    main.execute(argv)
if __name__ == '__main__':
    main(sys.argv)
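# --- Usage sketch (added; not part of the ANTLR-generated output) -----------
# A minimal example of driving this parser programmatically rather than via
# main().  It assumes the companion lexer is the sibling module ``mexpLexer``
# (the name handed to ParserMain above), that the vendored antlr3 runtime
# exposes ANTLRStringStream and CommonTokenStream like the stock runtime, and
# that ``prog`` is the grammar's start rule; adjust these names if they differ.


def parse_mexp_source(text):
    """Parse a mexp program from a string and return the AST root node."""
    from schrodinger.application.desmond.antlr3 import ANTLRStringStream
    from schrodinger.application.desmond.antlr3 import CommonTokenStream
    from schrodinger.application.desmond.enhanced_sampling.mexpLexer import \
        mexpLexer

    lexer = mexpLexer(ANTLRStringStream(text))
    tokens = CommonTokenStream(lexer)
    parser = mexpParser(tokens)
    return parser.prog().tree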