import unittest
import textwrap
import antlr3
import antlr3.tree
import testbase
import sys

class T(testbase.ANTLRTest):
    """Tests for composite (imported/delegate) grammars with the Python target.

    Each test writes one or more "slave" grammars plus a "master" grammar
    that imports them, compiles the master inline, and asserts on the text
    captured by the generated recognizers.
    """

    def setUp(self):
        # Slave grammars are generated into self.baseDir and then imported
        # as plain Python modules, so that directory must be on sys.path.
        # Keep a copy of the old path so tearDown can restore it exactly.
        self.oldPath = sys.path[:]
        sys.path.insert(0, self.baseDir)


    def tearDown(self):
        # Restore sys.path as saved by setUp.
        sys.path = self.oldPath


    def parserClass(self, base):
        """Return a parser subclass of *base* that captures action output.

        The generated grammars call self.capture(...) from embedded actions;
        the accumulated text ends up in parser._output for assertions.
        """
        class TParser(base):
            def __init__(self, *args, **kwargs):
                base.__init__(self, *args, **kwargs)

                # Buffer for text captured by grammar actions.
                self._output = ""


            def capture(self, t):
                # Collect text instead of printing, so tests can assert on it.
                self._output += t


            def traceIn(self, ruleName, ruleIndex):
                # NOTE(review): self.traces is not initialised in this class;
                # presumably provided by testbase or the generated recognizer.
                # Confirm before relying on rule tracing.
                self.traces.append('>'+ruleName)


            def traceOut(self, ruleName, ruleIndex):
                self.traces.append('<'+ruleName)


            def recover(self, input, re):
                # no error recovery yet, just crash!
                raise

        return TParser


    def lexerClass(self, base):
        """Return a lexer subclass of *base* that captures action output.

        Mirrors parserClass(); see that method for details.
        """
        class TLexer(base):
            def __init__(self, *args, **kwargs):
                base.__init__(self, *args, **kwargs)

                # Buffer for text captured by grammar actions.
                self._output = ""


            def capture(self, t):
                self._output += t


            def traceIn(self, ruleName, ruleIndex):
                # NOTE(review): self.traces not initialised here either --
                # see the note on TParser.traceIn.
                self.traces.append('>'+ruleName)


            def traceOut(self, ruleName, ruleIndex):
                self.traces.append('<'+ruleName)


            def recover(self, input):
                # no error recovery yet, just crash!
                raise

        return TLexer


    def execParser(self, grammar, grammarEntry, slaves, input):
        """Compile *grammar* (which imports *slaves*), parse *input*.

        Invokes rule *grammarEntry* on the parser and returns the text
        captured by the recognizer actions.
        """
        for slave in slaves:
            parserName = self.writeInlineGrammar(slave)[0]
            # slave parsers are imported as normal python modules
            # to force reloading current version, purge module from sys.modules
            try:
                del sys.modules[parserName+'Parser']
            except KeyError:
                pass

        lexerCls, parserCls = self.compileInlineGrammar(grammar)

        cStream = antlr3.StringStream(input)
        lexer = lexerCls(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = parserCls(tStream)
        # Dispatch to the entry rule by name.
        getattr(parser, grammarEntry)()

        return parser._output


    def execLexer(self, grammar, slaves, input):
        """Compile lexer *grammar* (importing *slaves*) and tokenize *input*.

        Returns the captured action output followed by the text of every
        token produced, concatenated.
        """
        for slave in slaves:
            parserName = self.writeInlineGrammar(slave)[0]
            # slave parsers are imported as normal python modules
            # to force reloading current version, purge module from sys.modules
            try:
                del sys.modules[parserName+'Parser']
            except KeyError:
                pass

        lexerCls = self.compileInlineGrammar(grammar)

        cStream = antlr3.StringStream(input)
        lexer = lexerCls(cStream)

        while True:
            token = lexer.nextToken()
            if token is None or token.type == antlr3.EOF:
                break

            lexer._output += token.text

        return lexer._output


    # @Test public void testWildcardStillWorks() throws Exception {
    #     ErrorQueue equeue = new ErrorQueue();
    #     ErrorManager.setErrorListener(equeue);
    #     String grammar =
    #         "parser grammar S;\n" +
    #         "a : B .
C ;\n"; // not qualified ID 124 # Grammar g = new Grammar(grammar); 125 # assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size()); 126 # } 127 128 129 def testDelegatorInvokesDelegateRule(self): 130 slave = textwrap.dedent( 131 r''' 132 parser grammar S1; 133 options { 134 language=Python; 135 } 136 @members { 137 def capture(self, t): 138 self.gM1.capture(t) 139 140 } 141 142 a : B { self.capture("S.a") } ; 143 ''') 144 145 master = textwrap.dedent( 146 r''' 147 grammar M1; 148 options { 149 language=Python; 150 } 151 import S1; 152 s : a ; 153 B : 'b' ; // defines B from inherited token space 154 WS : (' '|'\n') {self.skip()} ; 155 ''') 156 157 found = self.execParser( 158 master, 's', 159 slaves=[slave], 160 input="b" 161 ) 162 163 self.failUnlessEqual("S.a", found) 164 165 166 # @Test public void testDelegatorInvokesDelegateRuleWithReturnStruct() throws Exception { 167 # // must generate something like: 168 # // public int a(int x) throws RecognitionException { return gS.a(x); } 169 # // in M. 
170 # String slave = 171 # "parser grammar S;\n" + 172 # "a : B {System.out.print(\"S.a\");} ;\n"; 173 # mkdir(tmpdir); 174 # writeFile(tmpdir, "S.g", slave); 175 # String master = 176 # "grammar M;\n" + 177 # "import S;\n" + 178 # "s : a {System.out.println($a.text);} ;\n" + 179 # "B : 'b' ;" + // defines B from inherited token space 180 # "WS : (' '|'\\n') {skip();} ;\n" ; 181 # String found = execParser("M.g", master, "MParser", "MLexer", 182 # "s", "b", debug); 183 # assertEquals("S.ab\n", found); 184 # } 185 186 187 def testDelegatorInvokesDelegateRuleWithArgs(self): 188 slave = textwrap.dedent( 189 r''' 190 parser grammar S2; 191 options { 192 language=Python; 193 } 194 @members { 195 def capture(self, t): 196 self.gM2.capture(t) 197 } 198 a[x] returns [y] : B {self.capture("S.a"); $y="1000";} ; 199 ''') 200 201 master = textwrap.dedent( 202 r''' 203 grammar M2; 204 options { 205 language=Python; 206 } 207 import S2; 208 s : label=a[3] {self.capture($label.y);} ; 209 B : 'b' ; // defines B from inherited token space 210 WS : (' '|'\n') {self.skip()} ; 211 ''') 212 213 found = self.execParser( 214 master, 's', 215 slaves=[slave], 216 input="b" 217 ) 218 219 self.failUnlessEqual("S.a1000", found) 220 221 222 def testDelegatorAccessesDelegateMembers(self): 223 slave = textwrap.dedent( 224 r''' 225 parser grammar S3; 226 options { 227 language=Python; 228 } 229 @members { 230 def capture(self, t): 231 self.gM3.capture(t) 232 233 def foo(self): 234 self.capture("foo") 235 } 236 a : B ; 237 ''') 238 239 master = textwrap.dedent( 240 r''' 241 grammar M3; // uses no rules from the import 242 options { 243 language=Python; 244 } 245 import S3; 246 s : 'b' {self.gS3.foo();} ; // gS is import pointer 247 WS : (' '|'\n') {self.skip()} ; 248 ''') 249 250 found = self.execParser( 251 master, 's', 252 slaves=[slave], 253 input="b" 254 ) 255 256 self.failUnlessEqual("foo", found) 257 258 259 def testDelegatorInvokesFirstVersionOfDelegateRule(self): 260 slave = 
textwrap.dedent( 261 r''' 262 parser grammar S4; 263 options { 264 language=Python; 265 } 266 @members { 267 def capture(self, t): 268 self.gM4.capture(t) 269 } 270 a : b {self.capture("S.a");} ; 271 b : B ; 272 ''') 273 274 slave2 = textwrap.dedent( 275 r''' 276 parser grammar T4; 277 options { 278 language=Python; 279 } 280 @members { 281 def capture(self, t): 282 self.gM4.capture(t) 283 } 284 a : B {self.capture("T.a");} ; // hidden by S.a 285 ''') 286 287 master = textwrap.dedent( 288 r''' 289 grammar M4; 290 options { 291 language=Python; 292 } 293 import S4,T4; 294 s : a ; 295 B : 'b' ; 296 WS : (' '|'\n') {self.skip()} ; 297 ''') 298 299 found = self.execParser( 300 master, 's', 301 slaves=[slave, slave2], 302 input="b" 303 ) 304 305 self.failUnlessEqual("S.a", found) 306 307 308 def testDelegatesSeeSameTokenType(self): 309 slave = textwrap.dedent( 310 r''' 311 parser grammar S5; // A, B, C token type order 312 options { 313 language=Python; 314 } 315 tokens { A; B; C; } 316 @members { 317 def capture(self, t): 318 self.gM5.capture(t) 319 } 320 x : A {self.capture("S.x ");} ; 321 ''') 322 323 slave2 = textwrap.dedent( 324 r''' 325 parser grammar T5; 326 options { 327 language=Python; 328 } 329 tokens { C; B; A; } /// reverse order 330 @members { 331 def capture(self, t): 332 self.gM5.capture(t) 333 } 334 y : A {self.capture("T.y");} ; 335 ''') 336 337 master = textwrap.dedent( 338 r''' 339 grammar M5; 340 options { 341 language=Python; 342 } 343 import S5,T5; 344 s : x y ; // matches AA, which should be "aa" 345 B : 'b' ; // another order: B, A, C 346 A : 'a' ; 347 C : 'c' ; 348 WS : (' '|'\n') {self.skip()} ; 349 ''') 350 351 found = self.execParser( 352 master, 's', 353 slaves=[slave, slave2], 354 input="aa" 355 ) 356 357 self.failUnlessEqual("S.x T.y", found) 358 359 360 # @Test public void testDelegatesSeeSameTokenType2() throws Exception { 361 # ErrorQueue equeue = new ErrorQueue(); 362 # ErrorManager.setErrorListener(equeue); 363 # String slave = 364 # 
"parser grammar S;\n" + // A, B, C token type order 365 # "tokens { A; B; C; }\n" + 366 # "x : A {System.out.println(\"S.x\");} ;\n"; 367 # mkdir(tmpdir); 368 # writeFile(tmpdir, "S.g", slave); 369 # String slave2 = 370 # "parser grammar T;\n" + 371 # "tokens { C; B; A; }\n" + // reverse order 372 # "y : A {System.out.println(\"T.y\");} ;\n"; 373 # mkdir(tmpdir); 374 # writeFile(tmpdir, "T.g", slave2); 375 376 # String master = 377 # "grammar M;\n" + 378 # "import S,T;\n" + 379 # "s : x y ;\n" + // matches AA, which should be "aa" 380 # "B : 'b' ;\n" + // another order: B, A, C 381 # "A : 'a' ;\n" + 382 # "C : 'c' ;\n" + 383 # "WS : (' '|'\\n') {skip();} ;\n" ; 384 # writeFile(tmpdir, "M.g", master); 385 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 386 # CompositeGrammar composite = new CompositeGrammar(); 387 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 388 # composite.setDelegationRoot(g); 389 # g.parseAndBuildAST(); 390 # g.composite.assignTokenTypes(); 391 392 # String expectedTokenIDToTypeMap = "[A=4, B=5, C=6, WS=7]"; 393 # String expectedStringLiteralToTypeMap = "{}"; 394 # String expectedTypeToTokenList = "[A, B, C, WS]"; 395 396 # assertEquals(expectedTokenIDToTypeMap, 397 # realElements(g.composite.tokenIDToTypeMap).toString()); 398 # assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString()); 399 # assertEquals(expectedTypeToTokenList, 400 # realElements(g.composite.typeToTokenList).toString()); 401 402 # assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size()); 403 # } 404 405 # @Test public void testCombinedImportsCombined() throws Exception { 406 # // for now, we don't allow combined to import combined 407 # ErrorQueue equeue = new ErrorQueue(); 408 # ErrorManager.setErrorListener(equeue); 409 # String slave = 410 # "grammar S;\n" + // A, B, C token type order 411 # "tokens { A; B; C; }\n" + 412 # "x : 'x' INT {System.out.println(\"S.x\");} ;\n" + 413 # "INT : '0'..'9'+ ;\n" + 414 # 
"WS : (' '|'\\n') {skip();} ;\n"; 415 # mkdir(tmpdir); 416 # writeFile(tmpdir, "S.g", slave); 417 418 # String master = 419 # "grammar M;\n" + 420 # "import S;\n" + 421 # "s : x INT ;\n"; 422 # writeFile(tmpdir, "M.g", master); 423 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 424 # CompositeGrammar composite = new CompositeGrammar(); 425 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 426 # composite.setDelegationRoot(g); 427 # g.parseAndBuildAST(); 428 # g.composite.assignTokenTypes(); 429 430 # assertEquals("unexpected errors: "+equeue, 1, equeue.errors.size()); 431 # String expectedError = "error(161): "+tmpdir.toString().replaceFirst("\\-[0-9]+","")+"/M.g:2:8: combined grammar M cannot import combined grammar S"; 432 # assertEquals("unexpected errors: "+equeue, expectedError, equeue.errors.get(0).toString().replaceFirst("\\-[0-9]+","")); 433 # } 434 435 # @Test public void testSameStringTwoNames() throws Exception { 436 # ErrorQueue equeue = new ErrorQueue(); 437 # ErrorManager.setErrorListener(equeue); 438 # String slave = 439 # "parser grammar S;\n" + 440 # "tokens { A='a'; }\n" + 441 # "x : A {System.out.println(\"S.x\");} ;\n"; 442 # mkdir(tmpdir); 443 # writeFile(tmpdir, "S.g", slave); 444 # String slave2 = 445 # "parser grammar T;\n" + 446 # "tokens { X='a'; }\n" + 447 # "y : X {System.out.println(\"T.y\");} ;\n"; 448 # mkdir(tmpdir); 449 # writeFile(tmpdir, "T.g", slave2); 450 451 # String master = 452 # "grammar M;\n" + 453 # "import S,T;\n" + 454 # "s : x y ;\n" + 455 # "WS : (' '|'\\n') {skip();} ;\n" ; 456 # writeFile(tmpdir, "M.g", master); 457 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 458 # CompositeGrammar composite = new CompositeGrammar(); 459 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 460 # composite.setDelegationRoot(g); 461 # g.parseAndBuildAST(); 462 # g.composite.assignTokenTypes(); 463 464 # String expectedTokenIDToTypeMap = "[A=4, WS=6, X=5]"; 465 # String expectedStringLiteralToTypeMap = 
"{'a'=4}"; 466 # String expectedTypeToTokenList = "[A, X, WS]"; 467 468 # assertEquals(expectedTokenIDToTypeMap, 469 # realElements(g.composite.tokenIDToTypeMap).toString()); 470 # assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString()); 471 # assertEquals(expectedTypeToTokenList, 472 # realElements(g.composite.typeToTokenList).toString()); 473 474 # Object expectedArg = "X='a'"; 475 # Object expectedArg2 = "A"; 476 # int expectedMsgID = ErrorManager.MSG_TOKEN_ALIAS_CONFLICT; 477 # GrammarSemanticsMessage expectedMessage = 478 # new GrammarSemanticsMessage(expectedMsgID, g, null, expectedArg, expectedArg2); 479 # checkGrammarSemanticsError(equeue, expectedMessage); 480 481 # assertEquals("unexpected errors: "+equeue, 1, equeue.errors.size()); 482 483 # String expectedError = 484 # "error(158): T.g:2:10: cannot alias X='a'; string already assigned to A"; 485 # assertEquals(expectedError, equeue.errors.get(0).toString()); 486 # } 487 488 # @Test public void testSameNameTwoStrings() throws Exception { 489 # ErrorQueue equeue = new ErrorQueue(); 490 # ErrorManager.setErrorListener(equeue); 491 # String slave = 492 # "parser grammar S;\n" + 493 # "tokens { A='a'; }\n" + 494 # "x : A {System.out.println(\"S.x\");} ;\n"; 495 # mkdir(tmpdir); 496 # writeFile(tmpdir, "S.g", slave); 497 # String slave2 = 498 # "parser grammar T;\n" + 499 # "tokens { A='x'; }\n" + 500 # "y : A {System.out.println(\"T.y\");} ;\n"; 501 502 # writeFile(tmpdir, "T.g", slave2); 503 504 # String master = 505 # "grammar M;\n" + 506 # "import S,T;\n" + 507 # "s : x y ;\n" + 508 # "WS : (' '|'\\n') {skip();} ;\n" ; 509 # writeFile(tmpdir, "M.g", master); 510 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 511 # CompositeGrammar composite = new CompositeGrammar(); 512 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 513 # composite.setDelegationRoot(g); 514 # g.parseAndBuildAST(); 515 # g.composite.assignTokenTypes(); 516 517 # String 
expectedTokenIDToTypeMap = "[A=4, T__6=6, WS=5]"; 518 # String expectedStringLiteralToTypeMap = "{'a'=4, 'x'=6}"; 519 # String expectedTypeToTokenList = "[A, WS, T__6]"; 520 521 # assertEquals(expectedTokenIDToTypeMap, 522 # realElements(g.composite.tokenIDToTypeMap).toString()); 523 # assertEquals(expectedStringLiteralToTypeMap, sortMapToString(g.composite.stringLiteralToTypeMap)); 524 # assertEquals(expectedTypeToTokenList, 525 # realElements(g.composite.typeToTokenList).toString()); 526 527 # Object expectedArg = "A='x'"; 528 # Object expectedArg2 = "'a'"; 529 # int expectedMsgID = ErrorManager.MSG_TOKEN_ALIAS_REASSIGNMENT; 530 # GrammarSemanticsMessage expectedMessage = 531 # new GrammarSemanticsMessage(expectedMsgID, g, null, expectedArg, expectedArg2); 532 # checkGrammarSemanticsError(equeue, expectedMessage); 533 534 # assertEquals("unexpected errors: "+equeue, 1, equeue.errors.size()); 535 536 # String expectedError = 537 # "error(159): T.g:2:10: cannot alias A='x'; token name already assigned to 'a'"; 538 # assertEquals(expectedError, equeue.errors.get(0).toString()); 539 # } 540 541 # @Test public void testImportedTokenVocabIgnoredWithWarning() throws Exception { 542 # ErrorQueue equeue = new ErrorQueue(); 543 # ErrorManager.setErrorListener(equeue); 544 # String slave = 545 # "parser grammar S;\n" + 546 # "options {tokenVocab=whatever;}\n" + 547 # "tokens { A='a'; }\n" + 548 # "x : A {System.out.println(\"S.x\");} ;\n"; 549 # mkdir(tmpdir); 550 # writeFile(tmpdir, "S.g", slave); 551 552 # String master = 553 # "grammar M;\n" + 554 # "import S;\n" + 555 # "s : x ;\n" + 556 # "WS : (' '|'\\n') {skip();} ;\n" ; 557 # writeFile(tmpdir, "M.g", master); 558 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 559 # CompositeGrammar composite = new CompositeGrammar(); 560 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 561 # composite.setDelegationRoot(g); 562 # g.parseAndBuildAST(); 563 # g.composite.assignTokenTypes(); 564 565 # Object expectedArg 
= "S"; 566 # int expectedMsgID = ErrorManager.MSG_TOKEN_VOCAB_IN_DELEGATE; 567 # GrammarSemanticsMessage expectedMessage = 568 # new GrammarSemanticsMessage(expectedMsgID, g, null, expectedArg); 569 # checkGrammarSemanticsWarning(equeue, expectedMessage); 570 571 # assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size()); 572 # assertEquals("unexpected errors: "+equeue, 1, equeue.warnings.size()); 573 574 # String expectedError = 575 # "warning(160): S.g:2:10: tokenVocab option ignored in imported grammar S"; 576 # assertEquals(expectedError, equeue.warnings.get(0).toString()); 577 # } 578 579 # @Test public void testImportedTokenVocabWorksInRoot() throws Exception { 580 # ErrorQueue equeue = new ErrorQueue(); 581 # ErrorManager.setErrorListener(equeue); 582 # String slave = 583 # "parser grammar S;\n" + 584 # "tokens { A='a'; }\n" + 585 # "x : A {System.out.println(\"S.x\");} ;\n"; 586 # mkdir(tmpdir); 587 # writeFile(tmpdir, "S.g", slave); 588 589 # String tokens = 590 # "A=99\n"; 591 # writeFile(tmpdir, "Test.tokens", tokens); 592 593 # String master = 594 # "grammar M;\n" + 595 # "options {tokenVocab=Test;}\n" + 596 # "import S;\n" + 597 # "s : x ;\n" + 598 # "WS : (' '|'\\n') {skip();} ;\n" ; 599 # writeFile(tmpdir, "M.g", master); 600 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 601 # CompositeGrammar composite = new CompositeGrammar(); 602 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 603 # composite.setDelegationRoot(g); 604 # g.parseAndBuildAST(); 605 # g.composite.assignTokenTypes(); 606 607 # String expectedTokenIDToTypeMap = "[A=99, WS=101]"; 608 # String expectedStringLiteralToTypeMap = "{'a'=100}"; 609 # String expectedTypeToTokenList = "[A, 'a', WS]"; 610 611 # assertEquals(expectedTokenIDToTypeMap, 612 # realElements(g.composite.tokenIDToTypeMap).toString()); 613 # assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString()); 614 # assertEquals(expectedTypeToTokenList, 615 # 
realElements(g.composite.typeToTokenList).toString()); 616 617 # assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size()); 618 # } 619 620 # @Test public void testSyntaxErrorsInImportsNotThrownOut() throws Exception { 621 # ErrorQueue equeue = new ErrorQueue(); 622 # ErrorManager.setErrorListener(equeue); 623 # String slave = 624 # "parser grammar S;\n" + 625 # "options {toke\n"; 626 # mkdir(tmpdir); 627 # writeFile(tmpdir, "S.g", slave); 628 629 # String master = 630 # "grammar M;\n" + 631 # "import S;\n" + 632 # "s : x ;\n" + 633 # "WS : (' '|'\\n') {skip();} ;\n" ; 634 # writeFile(tmpdir, "M.g", master); 635 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 636 # CompositeGrammar composite = new CompositeGrammar(); 637 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 638 # composite.setDelegationRoot(g); 639 # g.parseAndBuildAST(); 640 # g.composite.assignTokenTypes(); 641 642 # // whole bunch of errors from bad S.g file 643 # assertEquals("unexpected errors: "+equeue, 5, equeue.errors.size()); 644 # } 645 646 # @Test public void testSyntaxErrorsInImportsNotThrownOut2() throws Exception { 647 # ErrorQueue equeue = new ErrorQueue(); 648 # ErrorManager.setErrorListener(equeue); 649 # String slave = 650 # "parser grammar S;\n" + 651 # ": A {System.out.println(\"S.x\");} ;\n"; 652 # mkdir(tmpdir); 653 # writeFile(tmpdir, "S.g", slave); 654 655 # String master = 656 # "grammar M;\n" + 657 # "import S;\n" + 658 # "s : x ;\n" + 659 # "WS : (' '|'\\n') {skip();} ;\n" ; 660 # writeFile(tmpdir, "M.g", master); 661 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 662 # CompositeGrammar composite = new CompositeGrammar(); 663 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 664 # composite.setDelegationRoot(g); 665 # g.parseAndBuildAST(); 666 # g.composite.assignTokenTypes(); 667 668 # // whole bunch of errors from bad S.g file 669 # assertEquals("unexpected errors: "+equeue, 3, equeue.errors.size()); 670 # } 671 672 673 def 
testDelegatorRuleOverridesDelegate(self): 674 slave = textwrap.dedent( 675 r''' 676 parser grammar S6; 677 options { 678 language=Python; 679 } 680 @members { 681 def capture(self, t): 682 self.gM6.capture(t) 683 } 684 a : b {self.capture("S.a");} ; 685 b : B ; 686 ''') 687 688 master = textwrap.dedent( 689 r''' 690 grammar M6; 691 options { 692 language=Python; 693 } 694 import S6; 695 b : 'b'|'c' ; 696 WS : (' '|'\n') {self.skip()} ; 697 ''') 698 699 found = self.execParser( 700 master, 'a', 701 slaves=[slave], 702 input="c" 703 ) 704 705 self.failUnlessEqual("S.a", found) 706 707 708 # @Test public void testDelegatorRuleOverridesLookaheadInDelegate() throws Exception { 709 # String slave = 710 # "parser grammar JavaDecl;\n" + 711 # "type : 'int' ;\n" + 712 # "decl : type ID ';'\n" + 713 # " | type ID init ';' {System.out.println(\"JavaDecl: \"+$decl.text);}\n" + 714 # " ;\n" + 715 # "init : '=' INT ;\n" ; 716 # mkdir(tmpdir); 717 # writeFile(tmpdir, "JavaDecl.g", slave); 718 # String master = 719 # "grammar Java;\n" + 720 # "import JavaDecl;\n" + 721 # "prog : decl ;\n" + 722 # "type : 'int' | 'float' ;\n" + 723 # "\n" + 724 # "ID : 'a'..'z'+ ;\n" + 725 # "INT : '0'..'9'+ ;\n" + 726 # "WS : (' '|'\\n') {skip();} ;\n" ; 727 # // for float to work in decl, type must be overridden 728 # String found = execParser("Java.g", master, "JavaParser", "JavaLexer", 729 # "prog", "float x = 3;", debug); 730 # assertEquals("JavaDecl: floatx=3;\n", found); 731 # } 732 733 # @Test public void testDelegatorRuleOverridesDelegates() throws Exception { 734 # String slave = 735 # "parser grammar S;\n" + 736 # "a : b {System.out.println(\"S.a\");} ;\n" + 737 # "b : B ;\n" ; 738 # mkdir(tmpdir); 739 # writeFile(tmpdir, "S.g", slave); 740 741 # String slave2 = 742 # "parser grammar T;\n" + 743 # "tokens { A='x'; }\n" + 744 # "b : B {System.out.println(\"T.b\");} ;\n"; 745 # writeFile(tmpdir, "T.g", slave2); 746 747 # String master = 748 # "grammar M;\n" + 749 # "import S, T;\n" + 750 # 
"b : 'b'|'c' {System.out.println(\"M.b\");}|B|A ;\n" + 751 # "WS : (' '|'\\n') {skip();} ;\n" ; 752 # String found = execParser("M.g", master, "MParser", "MLexer", 753 # "a", "c", debug); 754 # assertEquals("M.b\n" + 755 # "S.a\n", found); 756 # } 757 758 # LEXER INHERITANCE 759 760 def testLexerDelegatorInvokesDelegateRule(self): 761 slave = textwrap.dedent( 762 r''' 763 lexer grammar S7; 764 options { 765 language=Python; 766 } 767 @members { 768 def capture(self, t): 769 self.gM7.capture(t) 770 } 771 A : 'a' {self.capture("S.A ");} ; 772 C : 'c' ; 773 ''') 774 775 master = textwrap.dedent( 776 r''' 777 lexer grammar M7; 778 options { 779 language=Python; 780 } 781 import S7; 782 B : 'b' ; 783 WS : (' '|'\n') {self.skip()} ; 784 ''') 785 786 found = self.execLexer( 787 master, 788 slaves=[slave], 789 input="abc" 790 ) 791 792 self.failUnlessEqual("S.A abc", found) 793 794 795 def testLexerDelegatorRuleOverridesDelegate(self): 796 slave = textwrap.dedent( 797 r''' 798 lexer grammar S8; 799 options { 800 language=Python; 801 } 802 @members { 803 def capture(self, t): 804 self.gM8.capture(t) 805 } 806 A : 'a' {self.capture("S.A")} ; 807 ''') 808 809 master = textwrap.dedent( 810 r''' 811 lexer grammar M8; 812 options { 813 language=Python; 814 } 815 import S8; 816 A : 'a' {self.capture("M.A ");} ; 817 WS : (' '|'\n') {self.skip()} ; 818 ''') 819 820 found = self.execLexer( 821 master, 822 slaves=[slave], 823 input="a" 824 ) 825 826 self.failUnlessEqual("M.A a", found) 827 828 # @Test public void testLexerDelegatorRuleOverridesDelegateLeavingNoRules() throws Exception { 829 # // M.Tokens has nothing to predict tokens from S. Should 830 # // not include S.Tokens alt in this case? 
831 # String slave = 832 # "lexer grammar S;\n" + 833 # "A : 'a' {System.out.println(\"S.A\");} ;\n"; 834 # mkdir(tmpdir); 835 # writeFile(tmpdir, "S.g", slave); 836 # String master = 837 # "lexer grammar M;\n" + 838 # "import S;\n" + 839 # "A : 'a' {System.out.println(\"M.A\");} ;\n" + 840 # "WS : (' '|'\\n') {skip();} ;\n" ; 841 # writeFile(tmpdir, "/M.g", master); 842 843 # ErrorQueue equeue = new ErrorQueue(); 844 # ErrorManager.setErrorListener(equeue); 845 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 846 # CompositeGrammar composite = new CompositeGrammar(); 847 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 848 # composite.setDelegationRoot(g); 849 # g.parseAndBuildAST(); 850 # composite.assignTokenTypes(); 851 # composite.defineGrammarSymbols(); 852 # composite.createNFAs(); 853 # g.createLookaheadDFAs(false); 854 855 # // predict only alts from M not S 856 # String expectingDFA = 857 # ".s0-'a'->.s1\n" + 858 # ".s0-{'\\n', ' '}->:s3=>2\n" + 859 # ".s1-<EOT>->:s2=>1\n"; 860 # org.antlr.analysis.DFA dfa = g.getLookaheadDFA(1); 861 # FASerializer serializer = new FASerializer(g); 862 # String result = serializer.serialize(dfa.startState); 863 # assertEquals(expectingDFA, result); 864 865 # // must not be a "unreachable alt: Tokens" error 866 # assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size()); 867 # } 868 869 # @Test public void testInvalidImportMechanism() throws Exception { 870 # // M.Tokens has nothing to predict tokens from S. Should 871 # // not include S.Tokens alt in this case? 
872 # String slave = 873 # "lexer grammar S;\n" + 874 # "A : 'a' {System.out.println(\"S.A\");} ;\n"; 875 # mkdir(tmpdir); 876 # writeFile(tmpdir, "S.g", slave); 877 # String master = 878 # "tree grammar M;\n" + 879 # "import S;\n" + 880 # "a : A ;"; 881 # writeFile(tmpdir, "/M.g", master); 882 883 # ErrorQueue equeue = new ErrorQueue(); 884 # ErrorManager.setErrorListener(equeue); 885 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 886 # CompositeGrammar composite = new CompositeGrammar(); 887 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 888 # composite.setDelegationRoot(g); 889 # g.parseAndBuildAST(); 890 891 # assertEquals("unexpected errors: "+equeue, 1, equeue.errors.size()); 892 # assertEquals("unexpected errors: "+equeue, 0, equeue.warnings.size()); 893 894 # String expectedError = 895 # "error(161): "+tmpdir.toString().replaceFirst("\\-[0-9]+","")+"/M.g:2:8: tree grammar M cannot import lexer grammar S"; 896 # assertEquals(expectedError, equeue.errors.get(0).toString().replaceFirst("\\-[0-9]+","")); 897 # } 898 899 # @Test public void testSyntacticPredicateRulesAreNotInherited() throws Exception { 900 # // if this compiles, it means that synpred1_S is defined in S.java 901 # // but not MParser.java. MParser has its own synpred1_M which must 902 # // be separate to compile. 
903 # String slave = 904 # "parser grammar S;\n" + 905 # "a : 'a' {System.out.println(\"S.a1\");}\n" + 906 # " | 'a' {System.out.println(\"S.a2\");}\n" + 907 # " ;\n" + 908 # "b : 'x' | 'y' {;} ;\n"; // preds generated but not need in DFA here 909 # mkdir(tmpdir); 910 # writeFile(tmpdir, "S.g", slave); 911 # String master = 912 # "grammar M;\n" + 913 # "options {backtrack=true;}\n" + 914 # "import S;\n" + 915 # "start : a b ;\n" + 916 # "nonsense : 'q' | 'q' {;} ;" + // forces def of preds here in M 917 # "WS : (' '|'\\n') {skip();} ;\n" ; 918 # String found = execParser("M.g", master, "MParser", "MLexer", 919 # "start", "ax", debug); 920 # assertEquals("S.a1\n", found); 921 # } 922 923 # @Test public void testKeywordVSIDGivesNoWarning() throws Exception { 924 # ErrorQueue equeue = new ErrorQueue(); 925 # ErrorManager.setErrorListener(equeue); 926 # String slave = 927 # "lexer grammar S;\n" + 928 # "A : 'abc' {System.out.println(\"S.A\");} ;\n" + 929 # "ID : 'a'..'z'+ ;\n"; 930 # mkdir(tmpdir); 931 # writeFile(tmpdir, "S.g", slave); 932 # String master = 933 # "grammar M;\n" + 934 # "import S;\n" + 935 # "a : A {System.out.println(\"M.a\");} ;\n" + 936 # "WS : (' '|'\\n') {skip();} ;\n" ; 937 # String found = execParser("M.g", master, "MParser", "MLexer", 938 # "a", "abc", debug); 939 940 # assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size()); 941 # assertEquals("unexpected warnings: "+equeue, 0, equeue.warnings.size()); 942 943 # assertEquals("S.A\nM.a\n", found); 944 # } 945 946 # @Test public void testWarningForUndefinedToken() throws Exception { 947 # ErrorQueue equeue = new ErrorQueue(); 948 # ErrorManager.setErrorListener(equeue); 949 # String slave = 950 # "lexer grammar S;\n" + 951 # "A : 'abc' {System.out.println(\"S.A\");} ;\n"; 952 # mkdir(tmpdir); 953 # writeFile(tmpdir, "S.g", slave); 954 # String master = 955 # "grammar M;\n" + 956 # "import S;\n" + 957 # "a : ABC A {System.out.println(\"M.a\");} ;\n" + 958 # "WS : (' '|'\\n') {skip();} 
;\n" ; 959 # // A is defined in S but M should still see it and not give warning. 960 # // only problem is ABC. 961 962 # rawGenerateAndBuildRecognizer("M.g", master, "MParser", "MLexer", debug); 963 964 # assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size()); 965 # assertEquals("unexpected warnings: "+equeue, 1, equeue.warnings.size()); 966 967 # String expectedError = 968 # "warning(105): "+tmpdir.toString().replaceFirst("\\-[0-9]+","")+"/M.g:3:5: no lexer rule corresponding to token: ABC"; 969 # assertEquals(expectedError, equeue.warnings.get(0).toString().replaceFirst("\\-[0-9]+","")); 970 # } 971 972 # /** Make sure that M can import S that imports T. */ 973 # @Test public void test3LevelImport() throws Exception { 974 # ErrorQueue equeue = new ErrorQueue(); 975 # ErrorManager.setErrorListener(equeue); 976 # String slave = 977 # "parser grammar T;\n" + 978 # "a : T ;\n" ; 979 # mkdir(tmpdir); 980 # writeFile(tmpdir, "T.g", slave); 981 # String slave2 = 982 # "parser grammar S;\n" + // A, B, C token type order 983 # "import T;\n" + 984 # "a : S ;\n" ; 985 # mkdir(tmpdir); 986 # writeFile(tmpdir, "S.g", slave2); 987 988 # String master = 989 # "grammar M;\n" + 990 # "import S;\n" + 991 # "a : M ;\n" ; 992 # writeFile(tmpdir, "M.g", master); 993 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 994 # CompositeGrammar composite = new CompositeGrammar(); 995 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 996 # composite.setDelegationRoot(g); 997 # g.parseAndBuildAST(); 998 # g.composite.assignTokenTypes(); 999 # g.composite.defineGrammarSymbols(); 1000 1001 # String expectedTokenIDToTypeMap = "[M=6, S=5, T=4]"; 1002 # String expectedStringLiteralToTypeMap = "{}"; 1003 # String expectedTypeToTokenList = "[T, S, M]"; 1004 1005 # assertEquals(expectedTokenIDToTypeMap, 1006 # realElements(g.composite.tokenIDToTypeMap).toString()); 1007 # assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString()); 1008 # 
assertEquals(expectedTypeToTokenList, 1009 # realElements(g.composite.typeToTokenList).toString()); 1010 1011 # assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size()); 1012 1013 # boolean ok = 1014 # rawGenerateAndBuildRecognizer("M.g", master, "MParser", null, false); 1015 # boolean expecting = true; // should be ok 1016 # assertEquals(expecting, ok); 1017 # } 1018 1019 # @Test public void testBigTreeOfImports() throws Exception { 1020 # ErrorQueue equeue = new ErrorQueue(); 1021 # ErrorManager.setErrorListener(equeue); 1022 # String slave = 1023 # "parser grammar T;\n" + 1024 # "x : T ;\n" ; 1025 # mkdir(tmpdir); 1026 # writeFile(tmpdir, "T.g", slave); 1027 # slave = 1028 # "parser grammar S;\n" + 1029 # "import T;\n" + 1030 # "y : S ;\n" ; 1031 # mkdir(tmpdir); 1032 # writeFile(tmpdir, "S.g", slave); 1033 1034 # slave = 1035 # "parser grammar C;\n" + 1036 # "i : C ;\n" ; 1037 # mkdir(tmpdir); 1038 # writeFile(tmpdir, "C.g", slave); 1039 # slave = 1040 # "parser grammar B;\n" + 1041 # "j : B ;\n" ; 1042 # mkdir(tmpdir); 1043 # writeFile(tmpdir, "B.g", slave); 1044 # slave = 1045 # "parser grammar A;\n" + 1046 # "import B,C;\n" + 1047 # "k : A ;\n" ; 1048 # mkdir(tmpdir); 1049 # writeFile(tmpdir, "A.g", slave); 1050 1051 # String master = 1052 # "grammar M;\n" + 1053 # "import S,A;\n" + 1054 # "a : M ;\n" ; 1055 # writeFile(tmpdir, "M.g", master); 1056 # Tool antlr = newTool(new String[] {"-lib", tmpdir}); 1057 # CompositeGrammar composite = new CompositeGrammar(); 1058 # Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite); 1059 # composite.setDelegationRoot(g); 1060 # g.parseAndBuildAST(); 1061 # g.composite.assignTokenTypes(); 1062 # g.composite.defineGrammarSymbols(); 1063 1064 # String expectedTokenIDToTypeMap = "[A=8, B=6, C=7, M=9, S=5, T=4]"; 1065 # String expectedStringLiteralToTypeMap = "{}"; 1066 # String expectedTypeToTokenList = "[T, S, B, C, A, M]"; 1067 1068 # assertEquals(expectedTokenIDToTypeMap, 1069 # 
#                      realElements(g.composite.tokenIDToTypeMap).toString());
#         assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString());
#         assertEquals(expectedTypeToTokenList,
#                      realElements(g.composite.typeToTokenList).toString());

#         assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size());

#         boolean ok =
#             rawGenerateAndBuildRecognizer("M.g", master, "MParser", null, false);
#         boolean expecting = true; // should be ok
#         assertEquals(expecting, ok);
#     }

#     @Test public void testRulesVisibleThroughMultilevelImport() throws Exception {
#         ErrorQueue equeue = new ErrorQueue();
#         ErrorManager.setErrorListener(equeue);
#         String slave =
#             "parser grammar T;\n" +
#             "x : T ;\n" ;
#         mkdir(tmpdir);
#         writeFile(tmpdir, "T.g", slave);
#         String slave2 =
#             "parser grammar S;\n" + // A, B, C token type order
#             "import T;\n" +
#             "a : S ;\n" ;
#         mkdir(tmpdir);
#         writeFile(tmpdir, "S.g", slave2);

#         String master =
#             "grammar M;\n" +
#             "import S;\n" +
#             "a : M x ;\n" ; // x MUST BE VISIBLE TO M
#         writeFile(tmpdir, "M.g", master);
#         Tool antlr = newTool(new String[] {"-lib", tmpdir});
#         CompositeGrammar composite = new CompositeGrammar();
#         Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite);
#         composite.setDelegationRoot(g);
#         g.parseAndBuildAST();
#         g.composite.assignTokenTypes();
#         g.composite.defineGrammarSymbols();

#         String expectedTokenIDToTypeMap = "[M=6, S=5, T=4]";
#         String expectedStringLiteralToTypeMap = "{}";
#         String expectedTypeToTokenList = "[T, S, M]";

#         assertEquals(expectedTokenIDToTypeMap,
#                      realElements(g.composite.tokenIDToTypeMap).toString());
#         assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString());
#         assertEquals(expectedTypeToTokenList,
#
#                      realElements(g.composite.typeToTokenList).toString());

#         assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size());
#     }

#     @Test public void testNestedComposite() throws Exception {
#         // Wasn't compiling. http://www.antlr.org/jira/browse/ANTLR-438
#         ErrorQueue equeue = new ErrorQueue();
#         ErrorManager.setErrorListener(equeue);
#         String gstr =
#             "lexer grammar L;\n" +
#             "T1: '1';\n" +
#             "T2: '2';\n" +
#             "T3: '3';\n" +
#             "T4: '4';\n" ;
#         mkdir(tmpdir);
#         writeFile(tmpdir, "L.g", gstr);
#         gstr =
#             "parser grammar G1;\n" +
#             "s: a | b;\n" +
#             "a: T1;\n" +
#             "b: T2;\n" ;
#         mkdir(tmpdir);
#         writeFile(tmpdir, "G1.g", gstr);

#         gstr =
#             "parser grammar G2;\n" +
#             "import G1;\n" +
#             "a: T3;\n" ;
#         mkdir(tmpdir);
#         writeFile(tmpdir, "G2.g", gstr);
#         String G3str =
#             "grammar G3;\n" +
#             "import G2;\n" +
#             "b: T4;\n" ;
#         mkdir(tmpdir);
#         writeFile(tmpdir, "G3.g", G3str);

#         Tool antlr = newTool(new String[] {"-lib", tmpdir});
#         CompositeGrammar composite = new CompositeGrammar();
#         Grammar g = new Grammar(antlr,tmpdir+"/G3.g",composite);
#         composite.setDelegationRoot(g);
#         g.parseAndBuildAST();
#         g.composite.assignTokenTypes();
#         g.composite.defineGrammarSymbols();

#         String expectedTokenIDToTypeMap = "[T1=4, T2=5, T3=6, T4=7]";
#         String expectedStringLiteralToTypeMap = "{}";
#         String expectedTypeToTokenList = "[T1, T2, T3, T4]";

#         assertEquals(expectedTokenIDToTypeMap,
#                      realElements(g.composite.tokenIDToTypeMap).toString());
#         assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString());
#         assertEquals(expectedTypeToTokenList,
#                      realElements(g.composite.typeToTokenList).toString());

#         assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size());

#
#         boolean ok =
#             rawGenerateAndBuildRecognizer("G3.g", G3str, "G3Parser", null, false);
#         boolean expecting = true; // should be ok
#         assertEquals(expecting, ok);
#     }

#     @Test public void testHeadersPropogatedCorrectlyToImportedGrammars() throws Exception {
#         String slave =
#             "parser grammar S;\n" +
#             "a : B {System.out.print(\"S.a\");} ;\n";
#         mkdir(tmpdir);
#         writeFile(tmpdir, "S.g", slave);
#         String master =
#             "grammar M;\n" +
#             "import S;\n" +
#             "@header{package mypackage;}\n" +
#             "@lexer::header{package mypackage;}\n" +
#             "s : a ;\n" +
#             "B : 'b' ;" + // defines B from inherited token space
#             "WS : (' '|'\\n') {skip();} ;\n" ;
#         boolean ok = antlr("M.g", "M.g", master, debug);
#         boolean expecting = true; // should be ok
#         assertEquals(expecting, ok);
#     }


# Standard unittest entry point: discover and run every TestCase in this
# module when the file is executed directly (no effect when imported).
if __name__ == '__main__':
    unittest.main()