import copy
import gc
import pickle
import sys
import unittest
import warnings
import weakref
import inspect

from test import support

# _testcapi is an optional CPython-internals helper module; tests that need
# it are skipped when it is unavailable.
try:
    import _testcapi
except ImportError:
    _testcapi = None


# This tests to make sure that if a SIGINT arrives just before we send into a
# yield from chain, the KeyboardInterrupt is raised in the innermost
# generator (see bpo-30039).
@unittest.skipUnless(_testcapi is not None and
                     hasattr(_testcapi, "raise_SIGINT_then_send_None"),
                     "needs _testcapi.raise_SIGINT_then_send_None")
class SignalAndYieldFromTest(unittest.TestCase):
    """Check that a pending SIGINT surfaces in the innermost generator
    of a ``yield from`` delegation chain (bpo-30039)."""

    def generator1(self):
        # Outer generator: pure delegation into generator2.
        return (yield from self.generator2())

    def generator2(self):
        # Inner generator: reports whether the KeyboardInterrupt was
        # raised here (the expected place) rather than somewhere else.
        try:
            yield
        except KeyboardInterrupt:
            return "PASSED"
        else:
            return "FAILED"

    def test_raise_and_yield_from(self):
        gen = self.generator1()
        gen.send(None)
        # The C helper raises SIGINT and immediately resumes the chain;
        # generator2 must catch the KeyboardInterrupt and return "PASSED",
        # which reaches us as StopIteration.value.
        try:
            _testcapi.raise_SIGINT_then_send_None(gen)
        except BaseException as _exc:
            exc = _exc
        self.assertIs(type(exc), StopIteration)
        self.assertEqual(exc.value, "PASSED")


class FinalizationTest(unittest.TestCase):
    """Tests around generator finalization and garbage collection."""

    def test_frame_resurrect(self):
        # A generator frame can be resurrected by a generator's finalization.
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                # Capturing the frame here keeps it alive past the
                # generator's own collection.
                frame = sys._getframe()

        g = gen()
        wr = weakref.ref(g)
        next(g)
        del g
        support.gc_collect()
        # The generator itself is gone, but the frame it stored survives.
        self.assertIs(wr(), None)
        self.assertTrue(frame)
        del frame
        support.gc_collect()

    def test_refcycle(self):
        # A generator caught in a refcycle gets finalized anyway.
        old_garbage = gc.garbage[:]
        finalized = False
        def gen():
            nonlocal finalized
            try:
                g = yield
                yield 1
            finally:
                finalized = True

        g = gen()
        next(g)
        # Sending the generator into itself creates the reference cycle.
        g.send(g)
        self.assertGreater(sys.getrefcount(g), 2)
        self.assertFalse(finalized)
        del g
        support.gc_collect()
        # The cycle collector must still have run the finally block, and
        # must not have left anything behind in gc.garbage.
        self.assertTrue(finalized)
        self.assertEqual(gc.garbage, old_garbage)

    def test_lambda_generator(self):
        # Issue #23192: Test that a lambda returning a generator behaves
        # like the equivalent function
        f = lambda: (yield 1)
        def g(): return (yield 1)

        # test 'yield from'
        f2 = lambda: (yield from g())
        def g2(): return (yield from g())

        f3 = lambda: (yield from f())
        def g3(): return (yield from f())

        for gen_fun in (f, g, f2, g2, f3, g3):
            gen = gen_fun()
            self.assertEqual(next(gen), 1)
            with self.assertRaises(StopIteration) as cm:
                gen.send(2)
            # The sent value becomes the generator's return value.
            self.assertEqual(cm.exception.value, 2)


class GeneratorTest(unittest.TestCase):
    """Basic generator-object attribute behavior."""

    def test_name(self):
        def func():
            yield 1

        # check generator names
        gen = func()
        self.assertEqual(gen.__name__, "func")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.func")

        # modify generator names
        gen.__name__ = "name"
        gen.__qualname__ = "qualname"
        self.assertEqual(gen.__name__, "name")
        self.assertEqual(gen.__qualname__, "qualname")

        # generator names must be a string and cannot be deleted
        self.assertRaises(TypeError, setattr, gen, '__name__', 123)
        self.assertRaises(TypeError, setattr, gen, '__qualname__', 123)
        self.assertRaises(TypeError, delattr, gen, '__name__')
        self.assertRaises(TypeError, delattr, gen, '__qualname__')

        # modify names of the function creating the generator; names are
        # snapshotted at generator creation time
        func.__qualname__ = "func_qualname"
        func.__name__ = "func_name"
        gen = func()
        self.assertEqual(gen.__name__, "func_name")
        self.assertEqual(gen.__qualname__, "func_qualname")
        # unnamed generator (genexp): gets the generic "<genexpr>" name
        gen = (x for x in range(10))
        self.assertEqual(gen.__name__,
                         "<genexpr>")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.<genexpr>")

    def test_copy(self):
        # Generator objects cannot be shallow-copied.
        def f():
            yield 1
        g = f()
        with self.assertRaises(TypeError):
            copy.copy(g)

    def test_pickle(self):
        # Generator objects cannot be pickled, under any protocol.
        def f():
            yield 1
        g = f()
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(g, proto)


class ExceptionTest(unittest.TestCase):
    # Tests for the issue #23353: check that the currently handled exception
    # is correctly saved/restored in PyEval_EvalFrameEx().

    def test_except_throw(self):
        def store_raise_exc_generator():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
            except Exception as exc:
                # exception raised by gen.throw(exc)
                self.assertEqual(sys.exc_info()[0], ValueError)
                self.assertIsNone(exc.__context__)
                yield

                # ensure that the exception is not lost
                self.assertEqual(sys.exc_info()[0], ValueError)
                yield

                # we should be able to raise back the ValueError
                raise

        make = store_raise_exc_generator()
        next(make)

        try:
            raise ValueError()
        except Exception as exc:
            try:
                make.throw(exc)
            except Exception:
                pass

        next(make)
        with self.assertRaises(ValueError) as cm:
            next(make)
        self.assertIsNone(cm.exception.__context__)

        # No exception may leak out of the generator machinery.
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_next(self):
        def gen():
            # The caller's in-flight exception is visible inside the
            # generator when it is resumed from an except block.
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield "done"

        g = gen()
        try:
            raise ValueError
        except Exception:
            self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_gen_except(self):
        def gen():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
                # we are called from "except ValueError:", TypeError must
                # inherit ValueError in its context
                raise TypeError()
            except TypeError as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # here we are still called from the "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception:
            next(g)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_throw_exception_context(self):
        def gen():
            try:
                try:
                    self.assertEqual(sys.exc_info()[0], None)
                    yield
                except ValueError:
                    # we are called from "except ValueError:"
                    self.assertEqual(sys.exc_info()[0], ValueError)
                    raise TypeError()
            except Exception as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # we are still called from "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception as exc:
            g.throw(exc)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_stopiteration_error(self):
        # See also PEP 479.
        def gen():
            raise StopIteration
            yield

        # PEP 479: a StopIteration escaping a generator body becomes a
        # RuntimeError instead of silently ending iteration.
        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(gen())

    def test_tutorial_stopiteration(self):
        # "Raise StopIteration" stops the generator too:

        def f():
            yield 1
            raise StopIteration
            yield 2  # never reached

        g = f()
        self.assertEqual(next(g), 1)

        # Under PEP 479 the explicit StopIteration is converted to
        # RuntimeError rather than terminating iteration normally.
        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(g)

    def test_return_tuple(self):
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send((2,))
        # Returning a tuple must not be confused with StopIteration's
        # own *args packing.
        self.assertEqual(cm.exception.value, (2,))

    def test_return_stopiteration(self):
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send(StopIteration(2))
        # The returned StopIteration instance is carried as the value of
        # the terminating StopIteration, not raised in its place.
        self.assertIsInstance(cm.exception.value, StopIteration)
        self.assertEqual(cm.exception.value.value, 2)


class YieldFromTests(unittest.TestCase):
    def test_generator_gi_yieldfrom(self):
        def a():
            # While a() runs, gen_b is RUNNING but is not itself
            # delegating any further, so its gi_yieldfrom is None.
            self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING)
            self.assertIsNone(gen_b.gi_yieldfrom)
            yield
            self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING)
            self.assertIsNone(gen_b.gi_yieldfrom)

        def b():
            self.assertIsNone(gen_b.gi_yieldfrom)
            yield from a()
            self.assertIsNone(gen_b.gi_yieldfrom)
            yield
            self.assertIsNone(gen_b.gi_yieldfrom)

        gen_b = b()
        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CREATED)
        self.assertIsNone(gen_b.gi_yieldfrom)

        gen_b.send(None)
        # Suspended inside "yield from a()": gi_yieldfrom is the inner
        # generator.
        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED)
        self.assertEqual(gen_b.gi_yieldfrom.gi_code.co_name, 'a')

        gen_b.send(None)
        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED)
        self.assertIsNone(gen_b.gi_yieldfrom)
347 348 [] = gen_b # Exhaust generator 349 self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CLOSED) 350 self.assertIsNone(gen_b.gi_yieldfrom) 351 352 353 tutorial_tests = """ 354 Let's try a simple generator: 355 356 >>> def f(): 357 ... yield 1 358 ... yield 2 359 360 >>> for i in f(): 361 ... print(i) 362 1 363 2 364 >>> g = f() 365 >>> next(g) 366 1 367 >>> next(g) 368 2 369 370 "Falling off the end" stops the generator: 371 372 >>> next(g) 373 Traceback (most recent call last): 374 File "<stdin>", line 1, in ? 375 File "<stdin>", line 2, in g 376 StopIteration 377 378 "return" also stops the generator: 379 380 >>> def f(): 381 ... yield 1 382 ... return 383 ... yield 2 # never reached 384 ... 385 >>> g = f() 386 >>> next(g) 387 1 388 >>> next(g) 389 Traceback (most recent call last): 390 File "<stdin>", line 1, in ? 391 File "<stdin>", line 3, in f 392 StopIteration 393 >>> next(g) # once stopped, can't be resumed 394 Traceback (most recent call last): 395 File "<stdin>", line 1, in ? 396 StopIteration 397 398 However, "return" and StopIteration are not exactly equivalent: 399 400 >>> def g1(): 401 ... try: 402 ... return 403 ... except: 404 ... yield 1 405 ... 406 >>> list(g1()) 407 [] 408 409 >>> def g2(): 410 ... try: 411 ... raise StopIteration 412 ... except: 413 ... yield 42 414 >>> print(list(g2())) 415 [42] 416 417 This may be surprising at first: 418 419 >>> def g3(): 420 ... try: 421 ... return 422 ... finally: 423 ... yield 1 424 ... 425 >>> list(g3()) 426 [1] 427 428 Let's create an alternate range() function implemented as a generator: 429 430 >>> def yrange(n): 431 ... for i in range(n): 432 ... yield i 433 ... 434 >>> list(yrange(5)) 435 [0, 1, 2, 3, 4] 436 437 Generators always return to the most recent caller: 438 439 >>> def creator(): 440 ... r = yrange(5) 441 ... print("creator", next(r)) 442 ... return r 443 ... 444 >>> def caller(): 445 ... r = creator() 446 ... for i in r: 447 ... print("caller", i) 448 ... 
449 >>> caller() 450 creator 0 451 caller 1 452 caller 2 453 caller 3 454 caller 4 455 456 Generators can call other generators: 457 458 >>> def zrange(n): 459 ... for i in yrange(n): 460 ... yield i 461 ... 462 >>> list(zrange(5)) 463 [0, 1, 2, 3, 4] 464 465 """ 466 467 # The examples from PEP 255. 468 469 pep_tests = """ 470 471 Specification: Yield 472 473 Restriction: A generator cannot be resumed while it is actively 474 running: 475 476 >>> def g(): 477 ... i = next(me) 478 ... yield i 479 >>> me = g() 480 >>> next(me) 481 Traceback (most recent call last): 482 ... 483 File "<string>", line 2, in g 484 ValueError: generator already executing 485 486 Specification: Return 487 488 Note that return isn't always equivalent to raising StopIteration: the 489 difference lies in how enclosing try/except constructs are treated. 490 For example, 491 492 >>> def f1(): 493 ... try: 494 ... return 495 ... except: 496 ... yield 1 497 >>> print(list(f1())) 498 [] 499 500 because, as in any function, return simply exits, but 501 502 >>> def f2(): 503 ... try: 504 ... raise StopIteration 505 ... except: 506 ... yield 42 507 >>> print(list(f2())) 508 [42] 509 510 because StopIteration is captured by a bare "except", as is any 511 exception. 512 513 Specification: Generators and Exception Propagation 514 515 >>> def f(): 516 ... return 1//0 517 >>> def g(): 518 ... yield f() # the zero division exception propagates 519 ... yield 42 # and we'll never get here 520 >>> k = g() 521 >>> next(k) 522 Traceback (most recent call last): 523 File "<stdin>", line 1, in ? 524 File "<stdin>", line 2, in g 525 File "<stdin>", line 2, in f 526 ZeroDivisionError: integer division or modulo by zero 527 >>> next(k) # and the generator cannot be resumed 528 Traceback (most recent call last): 529 File "<stdin>", line 1, in ? 530 StopIteration 531 >>> 532 533 Specification: Try/Except/Finally 534 535 >>> def f(): 536 ... try: 537 ... yield 1 538 ... try: 539 ... yield 2 540 ... 1//0 541 ... 
yield 3 # never get here 542 ... except ZeroDivisionError: 543 ... yield 4 544 ... yield 5 545 ... raise 546 ... except: 547 ... yield 6 548 ... yield 7 # the "raise" above stops this 549 ... except: 550 ... yield 8 551 ... yield 9 552 ... try: 553 ... x = 12 554 ... finally: 555 ... yield 10 556 ... yield 11 557 >>> print(list(f())) 558 [1, 2, 4, 5, 8, 9, 10, 11] 559 >>> 560 561 Guido's binary tree example. 562 563 >>> # A binary tree class. 564 >>> class Tree: 565 ... 566 ... def __init__(self, label, left=None, right=None): 567 ... self.label = label 568 ... self.left = left 569 ... self.right = right 570 ... 571 ... def __repr__(self, level=0, indent=" "): 572 ... s = level*indent + repr(self.label) 573 ... if self.left: 574 ... s = s + "\\n" + self.left.__repr__(level+1, indent) 575 ... if self.right: 576 ... s = s + "\\n" + self.right.__repr__(level+1, indent) 577 ... return s 578 ... 579 ... def __iter__(self): 580 ... return inorder(self) 581 582 >>> # Create a Tree from a list. 583 >>> def tree(list): 584 ... n = len(list) 585 ... if n == 0: 586 ... return [] 587 ... i = n // 2 588 ... return Tree(list[i], tree(list[:i]), tree(list[i+1:])) 589 590 >>> # Show it off: create a tree. 591 >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ") 592 593 >>> # A recursive generator that generates Tree labels in in-order. 594 >>> def inorder(t): 595 ... if t: 596 ... for x in inorder(t.left): 597 ... yield x 598 ... yield t.label 599 ... for x in inorder(t.right): 600 ... yield x 601 602 >>> # Show it off: create a tree. 603 >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ") 604 >>> # Print the nodes of the tree in in-order. 605 >>> for x in t: 606 ... print(' '+x, end='') 607 A B C D E F G H I J K L M N O P Q R S T U V W X Y Z 608 609 >>> # A non-recursive generator. 610 >>> def inorder(node): 611 ... stack = [] 612 ... while node: 613 ... while node.left: 614 ... stack.append(node) 615 ... node = node.left 616 ... yield node.label 617 ... while not node.right: 618 ... try: 619 ... 
node = stack.pop() 620 ... except IndexError: 621 ... return 622 ... yield node.label 623 ... node = node.right 624 625 >>> # Exercise the non-recursive generator. 626 >>> for x in t: 627 ... print(' '+x, end='') 628 A B C D E F G H I J K L M N O P Q R S T U V W X Y Z 629 630 """ 631 632 # Examples from Iterator-List and Python-Dev and c.l.py. 633 634 email_tests = """ 635 636 The difference between yielding None and returning it. 637 638 >>> def g(): 639 ... for i in range(3): 640 ... yield None 641 ... yield None 642 ... return 643 >>> list(g()) 644 [None, None, None, None] 645 646 Ensure that explicitly raising StopIteration acts like any other exception 647 in try/except, not like a return. 648 649 >>> def g(): 650 ... yield 1 651 ... try: 652 ... raise StopIteration 653 ... except: 654 ... yield 2 655 ... yield 3 656 >>> list(g()) 657 [1, 2, 3] 658 659 Next one was posted to c.l.py. 660 661 >>> def gcomb(x, k): 662 ... "Generate all combinations of k elements from list x." 663 ... 664 ... if k > len(x): 665 ... return 666 ... if k == 0: 667 ... yield [] 668 ... else: 669 ... first, rest = x[0], x[1:] 670 ... # A combination does or doesn't contain first. 671 ... # If it does, the remainder is a k-1 comb of rest. 672 ... for c in gcomb(rest, k-1): 673 ... c.insert(0, first) 674 ... yield c 675 ... # If it doesn't contain first, it's a k comb of rest. 676 ... for c in gcomb(rest, k): 677 ... yield c 678 679 >>> seq = list(range(1, 5)) 680 >>> for k in range(len(seq) + 2): 681 ... print("%d-combs of %s:" % (k, seq)) 682 ... for c in gcomb(seq, k): 683 ... 
print(" ", c) 684 0-combs of [1, 2, 3, 4]: 685 [] 686 1-combs of [1, 2, 3, 4]: 687 [1] 688 [2] 689 [3] 690 [4] 691 2-combs of [1, 2, 3, 4]: 692 [1, 2] 693 [1, 3] 694 [1, 4] 695 [2, 3] 696 [2, 4] 697 [3, 4] 698 3-combs of [1, 2, 3, 4]: 699 [1, 2, 3] 700 [1, 2, 4] 701 [1, 3, 4] 702 [2, 3, 4] 703 4-combs of [1, 2, 3, 4]: 704 [1, 2, 3, 4] 705 5-combs of [1, 2, 3, 4]: 706 707 From the Iterators list, about the types of these things. 708 709 >>> def g(): 710 ... yield 1 711 ... 712 >>> type(g) 713 <class 'function'> 714 >>> i = g() 715 >>> type(i) 716 <class 'generator'> 717 >>> [s for s in dir(i) if not s.startswith('_')] 718 ['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_yieldfrom', 'send', 'throw'] 719 >>> from test.support import HAVE_DOCSTRINGS 720 >>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).') 721 Implement next(self). 722 >>> iter(i) is i 723 True 724 >>> import types 725 >>> isinstance(i, types.GeneratorType) 726 True 727 728 And more, added later. 729 730 >>> i.gi_running 731 0 732 >>> type(i.gi_frame) 733 <class 'frame'> 734 >>> i.gi_running = 42 735 Traceback (most recent call last): 736 ... 737 AttributeError: readonly attribute 738 >>> def g(): 739 ... yield me.gi_running 740 >>> me = g() 741 >>> me.gi_running 742 0 743 >>> next(me) 744 1 745 >>> me.gi_running 746 0 747 748 A clever union-find implementation from c.l.py, due to David Eppstein. 749 Sent: Friday, June 29, 2001 12:16 PM 750 To: python-list (at] python.org 751 Subject: Re: PEP 255: Simple Generators 752 753 >>> class disjointSet: 754 ... def __init__(self, name): 755 ... self.name = name 756 ... self.parent = None 757 ... self.generator = self.generate() 758 ... 759 ... def generate(self): 760 ... while not self.parent: 761 ... yield self 762 ... for x in self.parent.generator: 763 ... yield x 764 ... 765 ... def find(self): 766 ... return next(self.generator) 767 ... 768 ... def union(self, parent): 769 ... if self.parent: 770 ... 
raise ValueError("Sorry, I'm not a root!") 771 ... self.parent = parent 772 ... 773 ... def __str__(self): 774 ... return self.name 775 776 >>> names = "ABCDEFGHIJKLM" 777 >>> sets = [disjointSet(name) for name in names] 778 >>> roots = sets[:] 779 780 >>> import random 781 >>> gen = random.Random(42) 782 >>> while 1: 783 ... for s in sets: 784 ... print(" %s->%s" % (s, s.find()), end='') 785 ... print() 786 ... if len(roots) > 1: 787 ... s1 = gen.choice(roots) 788 ... roots.remove(s1) 789 ... s2 = gen.choice(roots) 790 ... s1.union(s2) 791 ... print("merged", s1, "into", s2) 792 ... else: 793 ... break 794 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M 795 merged K into B 796 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M 797 merged A into F 798 A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M 799 merged E into F 800 A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M 801 merged D into C 802 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M 803 merged M into C 804 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C 805 merged J into B 806 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C 807 merged B into C 808 A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C 809 merged F into G 810 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C 811 merged L into C 812 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C 813 merged G into I 814 A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C 815 merged I into H 816 A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C 817 merged C into H 818 A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H 819 820 """ 821 # Emacs turd ' 822 823 # Fun tests (for sufficiently warped notions of "fun"). 824 825 fun_tests = """ 826 827 Build up to a recursive Sieve of Eratosthenes generator. 828 829 >>> def firstn(g, n): 830 ... 
return [next(g) for i in range(n)] 831 832 >>> def intsfrom(i): 833 ... while 1: 834 ... yield i 835 ... i += 1 836 837 >>> firstn(intsfrom(5), 7) 838 [5, 6, 7, 8, 9, 10, 11] 839 840 >>> def exclude_multiples(n, ints): 841 ... for i in ints: 842 ... if i % n: 843 ... yield i 844 845 >>> firstn(exclude_multiples(3, intsfrom(1)), 6) 846 [1, 2, 4, 5, 7, 8] 847 848 >>> def sieve(ints): 849 ... prime = next(ints) 850 ... yield prime 851 ... not_divisible_by_prime = exclude_multiples(prime, ints) 852 ... for p in sieve(not_divisible_by_prime): 853 ... yield p 854 855 >>> primes = sieve(intsfrom(2)) 856 >>> firstn(primes, 20) 857 [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71] 858 859 860 Another famous problem: generate all integers of the form 861 2**i * 3**j * 5**k 862 in increasing order, where i,j,k >= 0. Trickier than it may look at first! 863 Try writing it without generators, and correctly, and without generating 864 3 internal results for each result output. 865 866 >>> def times(n, g): 867 ... for i in g: 868 ... yield n * i 869 >>> firstn(times(10, intsfrom(1)), 10) 870 [10, 20, 30, 40, 50, 60, 70, 80, 90, 100] 871 872 >>> def merge(g, h): 873 ... ng = next(g) 874 ... nh = next(h) 875 ... while 1: 876 ... if ng < nh: 877 ... yield ng 878 ... ng = next(g) 879 ... elif ng > nh: 880 ... yield nh 881 ... nh = next(h) 882 ... else: 883 ... yield ng 884 ... ng = next(g) 885 ... nh = next(h) 886 887 The following works, but is doing a whale of a lot of redundant work -- 888 it's not clear how to get the internal uses of m235 to share a single 889 generator. Note that me_times2 (etc) each need to see every element in the 890 result sequence. So this is an example where lazy lists are more natural 891 (you can look at the head of a lazy list any number of times). 892 893 >>> def m235(): 894 ... yield 1 895 ... me_times2 = times(2, m235()) 896 ... me_times3 = times(3, m235()) 897 ... me_times5 = times(5, m235()) 898 ... 
for i in merge(merge(me_times2, 899 ... me_times3), 900 ... me_times5): 901 ... yield i 902 903 Don't print "too many" of these -- the implementation above is extremely 904 inefficient: each call of m235() leads to 3 recursive calls, and in 905 turn each of those 3 more, and so on, and so on, until we've descended 906 enough levels to satisfy the print stmts. Very odd: when I printed 5 907 lines of results below, this managed to screw up Win98's malloc in "the 908 usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting 909 address space, and it *looked* like a very slow leak. 910 911 >>> result = m235() 912 >>> for i in range(3): 913 ... print(firstn(result, 15)) 914 [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24] 915 [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80] 916 [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192] 917 918 Heh. Here's one way to get a shared list, complete with an excruciating 919 namespace renaming trick. The *pretty* part is that the times() and merge() 920 functions can be reused as-is, because they only assume their stream 921 arguments are iterable -- a LazyList is the same as a generator to times(). 922 923 >>> class LazyList: 924 ... def __init__(self, g): 925 ... self.sofar = [] 926 ... self.fetch = g.__next__ 927 ... 928 ... def __getitem__(self, i): 929 ... sofar, fetch = self.sofar, self.fetch 930 ... while i >= len(sofar): 931 ... sofar.append(fetch()) 932 ... return sofar[i] 933 934 >>> def m235(): 935 ... yield 1 936 ... # Gack: m235 below actually refers to a LazyList. 937 ... me_times2 = times(2, m235) 938 ... me_times3 = times(3, m235) 939 ... me_times5 = times(5, m235) 940 ... for i in merge(merge(me_times2, 941 ... me_times3), 942 ... me_times5): 943 ... yield i 944 945 Print as many of these as you like -- *this* implementation is memory- 946 efficient. 947 948 >>> m235 = LazyList(m235()) 949 >>> for i in range(5): 950 ... 
print([m235[j] for j in range(15*i, 15*(i+1))]) 951 [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24] 952 [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80] 953 [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192] 954 [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384] 955 [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675] 956 957 Ye olde Fibonacci generator, LazyList style. 958 959 >>> def fibgen(a, b): 960 ... 961 ... def sum(g, h): 962 ... while 1: 963 ... yield next(g) + next(h) 964 ... 965 ... def tail(g): 966 ... next(g) # throw first away 967 ... for x in g: 968 ... yield x 969 ... 970 ... yield a 971 ... yield b 972 ... for s in sum(iter(fib), 973 ... tail(iter(fib))): 974 ... yield s 975 976 >>> fib = LazyList(fibgen(1, 2)) 977 >>> firstn(iter(fib), 17) 978 [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584] 979 980 981 Running after your tail with itertools.tee (new in version 2.4) 982 983 The algorithms "m235" (Hamming) and Fibonacci presented above are both 984 examples of a whole family of FP (functional programming) algorithms 985 where a function produces and returns a list while the production algorithm 986 suppose the list as already produced by recursively calling itself. 987 For these algorithms to work, they must: 988 989 - produce at least a first element without presupposing the existence of 990 the rest of the list 991 - produce their elements in a lazy manner 992 993 To work efficiently, the beginning of the list must not be recomputed over 994 and over again. This is ensured in most FP languages as a built-in feature. 995 In python, we have to explicitly maintain a list of already computed results 996 and abandon genuine recursivity. 997 998 This is what had been attempted above with the LazyList class. One problem 999 with that class is that it keeps a list of all of the generated results and 1000 therefore continually grows. 
This partially defeats the goal of the generator 1001 concept, viz. produce the results only as needed instead of producing them 1002 all and thereby wasting memory. 1003 1004 Thanks to itertools.tee, it is now clear "how to get the internal uses of 1005 m235 to share a single generator". 1006 1007 >>> from itertools import tee 1008 >>> def m235(): 1009 ... def _m235(): 1010 ... yield 1 1011 ... for n in merge(times(2, m2), 1012 ... merge(times(3, m3), 1013 ... times(5, m5))): 1014 ... yield n 1015 ... m1 = _m235() 1016 ... m2, m3, m5, mRes = tee(m1, 4) 1017 ... return mRes 1018 1019 >>> it = m235() 1020 >>> for i in range(5): 1021 ... print(firstn(it, 15)) 1022 [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24] 1023 [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80] 1024 [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192] 1025 [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384] 1026 [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675] 1027 1028 The "tee" function does just what we want. It internally keeps a generated 1029 result for as long as it has not been "consumed" from all of the duplicated 1030 iterators, whereupon it is deleted. You can therefore print the hamming 1031 sequence during hours without increasing memory usage, or very little. 1032 1033 The beauty of it is that recursive running-after-their-tail FP algorithms 1034 are quite straightforwardly expressed with this Python idiom. 1035 1036 Ye olde Fibonacci generator, tee style. 1037 1038 >>> def fib(): 1039 ... 1040 ... def _isum(g, h): 1041 ... while 1: 1042 ... yield next(g) + next(h) 1043 ... 1044 ... def _fib(): 1045 ... yield 1 1046 ... yield 2 1047 ... next(fibTail) # throw first away 1048 ... for res in _isum(fibHead, fibTail): 1049 ... yield res 1050 ... 1051 ... realfib = _fib() 1052 ... fibHead, fibTail, fibRes = tee(realfib, 3) 1053 ... 
return fibRes 1054 1055 >>> firstn(fib(), 17) 1056 [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584] 1057 1058 """ 1059 1060 # syntax_tests mostly provokes SyntaxErrors. Also fiddling with #if 0 1061 # hackery. 1062 1063 syntax_tests = """ 1064 1065 These are fine: 1066 1067 >>> def f(): 1068 ... yield 1 1069 ... return 1070 1071 >>> def f(): 1072 ... try: 1073 ... yield 1 1074 ... finally: 1075 ... pass 1076 1077 >>> def f(): 1078 ... try: 1079 ... try: 1080 ... 1//0 1081 ... except ZeroDivisionError: 1082 ... yield 666 1083 ... except: 1084 ... pass 1085 ... finally: 1086 ... pass 1087 1088 >>> def f(): 1089 ... try: 1090 ... try: 1091 ... yield 12 1092 ... 1//0 1093 ... except ZeroDivisionError: 1094 ... yield 666 1095 ... except: 1096 ... try: 1097 ... x = 12 1098 ... finally: 1099 ... yield 12 1100 ... except: 1101 ... return 1102 >>> list(f()) 1103 [12, 666] 1104 1105 >>> def f(): 1106 ... yield 1107 >>> type(f()) 1108 <class 'generator'> 1109 1110 1111 >>> def f(): 1112 ... if 0: 1113 ... yield 1114 >>> type(f()) 1115 <class 'generator'> 1116 1117 1118 >>> def f(): 1119 ... if 0: 1120 ... yield 1 1121 >>> type(f()) 1122 <class 'generator'> 1123 1124 >>> def f(): 1125 ... if "": 1126 ... yield None 1127 >>> type(f()) 1128 <class 'generator'> 1129 1130 >>> def f(): 1131 ... return 1132 ... try: 1133 ... if x==4: 1134 ... pass 1135 ... elif 0: 1136 ... try: 1137 ... 1//0 1138 ... except SyntaxError: 1139 ... pass 1140 ... else: 1141 ... if 0: 1142 ... while 12: 1143 ... x += 1 1144 ... yield 2 # don't blink 1145 ... f(a, b, c, d, e) 1146 ... else: 1147 ... pass 1148 ... except: 1149 ... x = 1 1150 ... return 1151 >>> type(f()) 1152 <class 'generator'> 1153 1154 >>> def f(): 1155 ... if 0: 1156 ... def g(): 1157 ... yield 1 1158 ... 1159 >>> type(f()) 1160 <class 'NoneType'> 1161 1162 >>> def f(): 1163 ... if 0: 1164 ... class C: 1165 ... def __init__(self): 1166 ... yield 1 1167 ... def f(self): 1168 ... 
yield 2 1169 >>> type(f()) 1170 <class 'NoneType'> 1171 1172 >>> def f(): 1173 ... if 0: 1174 ... return 1175 ... if 0: 1176 ... yield 2 1177 >>> type(f()) 1178 <class 'generator'> 1179 1180 This one caused a crash (see SF bug 567538): 1181 1182 >>> def f(): 1183 ... for i in range(3): 1184 ... try: 1185 ... continue 1186 ... finally: 1187 ... yield i 1188 ... 1189 >>> g = f() 1190 >>> print(next(g)) 1191 0 1192 >>> print(next(g)) 1193 1 1194 >>> print(next(g)) 1195 2 1196 >>> print(next(g)) 1197 Traceback (most recent call last): 1198 StopIteration 1199 1200 1201 Test the gi_code attribute 1202 1203 >>> def f(): 1204 ... yield 5 1205 ... 1206 >>> g = f() 1207 >>> g.gi_code is f.__code__ 1208 True 1209 >>> next(g) 1210 5 1211 >>> next(g) 1212 Traceback (most recent call last): 1213 StopIteration 1214 >>> g.gi_code is f.__code__ 1215 True 1216 1217 1218 Test the __name__ attribute and the repr() 1219 1220 >>> def f(): 1221 ... yield 5 1222 ... 1223 >>> g = f() 1224 >>> g.__name__ 1225 'f' 1226 >>> repr(g) # doctest: +ELLIPSIS 1227 '<generator object f at ...>' 1228 1229 Lambdas shouldn't have their usual return behavior. 1230 1231 >>> x = lambda: (yield 1) 1232 >>> list(x()) 1233 [1] 1234 1235 >>> x = lambda: ((yield 1), (yield 2)) 1236 >>> list(x()) 1237 [1, 2] 1238 """ 1239 1240 # conjoin is a simple backtracking generator, named in honor of Icon's 1241 # "conjunction" control structure. Pass a list of no-argument functions 1242 # that return iterable objects. Easiest to explain by example: assume the 1243 # function list [x, y, z] is passed. Then conjoin acts like: 1244 # 1245 # def g(): 1246 # values = [None] * 3 1247 # for values[0] in x(): 1248 # for values[1] in y(): 1249 # for values[2] in z(): 1250 # yield values 1251 # 1252 # So some 3-lists of values *may* be generated, each time we successfully 1253 # get into the innermost loop. 
# If an iterator fails (is exhausted) before
# then, the walk "backtracks": the nearest enclosing iterator (the one
# "to the left") is pumped for its next value, and everything to its
# right starts over with a fresh iterator.  This is most useful when the
# iterators have side effects, so that the values each slot *can* produce
# depend on the values chosen at earlier slots.

def simple_conjoin(gs):
    """Backtracking cross-product over the iterables built by gs[i]().

    Yields one *shared* list per combination; callers must copy it if
    they want to keep a snapshot (see the conjoin doctests).
    """

    values = [None] * len(gs)

    def descend(slot):
        # Fill values[slot:]; a complete assignment yields the shared list.
        if slot >= len(gs):
            yield values
        else:
            for values[slot] in gs[slot]():
                for result in descend(slot + 1):
                    yield result

    for result in descend(0):
        yield result

# That works fine, but recursing a level and re-testing the slot index for
# every item produced is costly.  Manually unrolling three loop nests per
# generator level removes most of that overhead.  Not worth the bother for
# generators in general, but conjoin() is a core building block for some
# CPU-intensive generator applications.

def conjoin(gs):
    """Same contract as simple_conjoin(), with 3-deep loop unrolling."""

    n = len(gs)
    values = [None] * n

    # Peel off one loop nest per recursion level until the number of nests
    # still to produce is a multiple of 3, then hand off to _triple().
    def recur(slot):
        if slot >= n:
            yield values

        elif (n - slot) % 3:
            nxt = slot + 1
            for values[slot] in gs[slot]():
                for result in recur(nxt):
                    yield result

        else:
            for result in _triple(slot):
                yield result

    # Produce three loop nests at a time.  Internal helper for recur()
    # only; must not be called unless at least three nests remain.
    def _triple(slot):
        assert slot < n and (n - slot) % 3 == 0
        s1, s2, s3 = slot + 1, slot + 2, slot + 3
        ga, gb, gc = gs[slot:s3]

        if s3 >= n:
            # The final three nests: yield the shared list directly.
            for values[slot] in ga():
                for values[s1] in gb():
                    for values[s2] in gc():
                        yield values

        else:
            # Six or more nests remain: peel off three, recurse for the
            # rest.
            for values[slot] in ga():
                for values[s1] in gb():
                    for values[s2] in gc():
                        for result in _triple(s3):
                            yield result

    for result in recur(0):
        yield result

# One more approach: backtracking apps like the Knight's Tour solver below
# can need an enormous number of backtracking levels (one per square, so a
# 100x100 board needs 10,000 levels), and Python would run out of stack
# recursing that deep.  So here is a recursion-free conjoin as well.
# NOTE WELL: it solves large problems with only trivial stack demands —
# hard to achieve without explicitly resumable generators — but runs up to
# a factor of 2 slower than the unrolled recursive conjoin above.

def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    """Iterative (recursion-free) equivalent of conjoin()."""
    n = len(gs)
    values = [None] * n
    pumps = [None] * n
    _stop = StopIteration  # bind locally; this exception is caught a *lot*
    slot = 0
    while True:
        # Descend: start a fresh iterator in each remaining slot.
        try:
            while slot < n:
                pump = pumps[slot] = gs[slot]().__next__
                values[slot] = pump()
                slot += 1
        except _stop:
            pass
        else:
            assert slot == n
            yield values

        # Backtrack until some older iterator can be resumed.
        slot -= 1
        while slot >= 0:
            try:
                values[slot] = pumps[slot]()
                # Success!  Resume descending from the next level.
                slot += 1
                break
            except _stop:
                # That one is exhausted too; keep backtracking.
                slot -= 1
        else:
            assert slot < 0
            break

# A conjoin-based N-Queens solver.
class Queens:
    """N-Queens solver built on conjoin(): one generator per board row."""

    def __init__(self, n):
        # n is the board size: an n x n board holding n queens.
        self.n = n
        rangen = range(n)

        # Assign a unique int to each column and diagonal.
        # columns: n of those, range(n).
        # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
        # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
        # based.
        # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
        # each, smallest i+j is 0, largest is 2n-2.

        # For each square, compute a bit vector of the columns and
        # diagonals it covers, and for each row compute a function that
        # generates the possibilities for the columns in that row.
        self.rowgenerators = []
        for i in rangen:
            rowuses = [(1 << j) |  # column ordinal
                       (1 << (n + i-j + n-1)) |  # NW-SE ordinal
                       (1 << (n + 2*n-1 + i+j))  # NE-SW ordinal
                       for j in rangen]

            def rowgen(rowuses=rowuses):
                # Yield each column j still free in this row.  While j is
                # "on loan" (between the yield and its resumption) the
                # squares it covers are marked in self.used; they are
                # unmarked again when the caller backtracks into us.
                for j in rangen:
                    uses = rowuses[j]
                    if uses & self.used == 0:
                        self.used |= uses
                        yield j
                        self.used &= ~uses

            self.rowgenerators.append(rowgen)

    # Generate solutions.
    def solve(self):
        """Yield each solution as a row -> column list.

        The same list object is yielded every time (conjoin() semantics);
        copy it to keep a snapshot.
        """
        self.used = 0
        for row2col in conjoin(self.rowgenerators):
            yield row2col

    def printsolution(self, row2col):
        """Print an ASCII-art board for the solution ``row2col``."""
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print(sep)
        for i in range(n):
            squares = [" " for j in range(n)]
            squares[row2col[i]] = "Q"
            print("|" + "|".join(squares) + "|")
        print(sep)

# A conjoin-based Knight's Tour solver.  This is pretty sophisticated
# (e.g., when used with flat_conjoin above, and passing hard=1 to the
# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
# creating 10s of thousands of generators then!), and is lengthy.
class Knights:
    """Knight's Tour solver built on conjoin(): one generator per move."""

    def __init__(self, m, n, hard=0):
        # m x n board; hard=1 selects the more expensive move-ordering
        # heuristic (advance_hard) used for large boards.
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 through m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                        add_to_successors(i)

        # Generate moves 3 through m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                        add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        if m*n < 4:
            self.squaregenerators = [first]
        else:
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        """Map board coordinates (i, j) to a flat square index."""
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        """Inverse of coords2index(): flat index -> (i, j)."""
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        """(Re)build succs: the knight-move successor list of each square."""
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index

        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                     if 0 <= i+io < m and
                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        """Yield each closed tour as a list of square indices in move order."""
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        """Print an ASCII-art board numbering the squares in visit order."""
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print(sep)
        for i in range(m):
            row = squares[i]
            print("|" + "|".join(row) + "|")
        print(sep)

conjoin_tests = """

Generate the 3-bit binary numbers in order.  This illustrates dumbest-
possible use of conjoin, just to generate the full cross-product.

>>> for c in conjoin([lambda: iter((0, 1))] * 3):
...     print(c)
[0, 0, 0]
[0, 0, 1]
[0, 1, 0]
[0, 1, 1]
[1, 0, 0]
[1, 0, 1]
[1, 1, 0]
[1, 1, 1]

For efficiency in typical backtracking apps, conjoin() yields the same list
object each time.  So if you want to save away a full account of its
generated sequence, you need to copy its results.

>>> def gencopy(iterator):
...     for x in iterator:
...         yield x[:]

>>> for n in range(10):
...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
...     print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
0 1 True True
1 2 True True
2 4 True True
3 8 True True
4 16 True True
5 32 True True
6 64 True True
7 128 True True
8 256 True True
9 512 True True

And run an 8-queens solver.

>>> q = Queens(8)
>>> LIMIT = 2
>>> count = 0
>>> for row2col in q.solve():
...     count += 1
...     if count <= LIMIT:
...         print("Solution", count)
...         q.printsolution(row2col)
Solution 1
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
Solution 2
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+

>>> print(count, "solutions in all.")
92 solutions in all.

And run a Knight's Tour on a 10x10 board.  Note that there are about
20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.

>>> k = Knights(10, 10)
>>> LIMIT = 2
>>> count = 0
>>> for x in k.solve():
...     count += 1
...     if count <= LIMIT:
...         print("Solution", count)
...         k.printsolution(x)
...     else:
...         break
Solution 1
+---+---+---+---+---+---+---+---+---+---+
|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
Solution 2
+---+---+---+---+---+---+---+---+---+---+
|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
"""

weakref_tests = """\
Generators are weakly referencable:

>>> import weakref
>>> def gen():
...     yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)

Generator-iterators are weakly referencable as well:

>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']

"""

coroutine_tests = """\
Sending a value into a started generator:

>>> def f():
...     print((yield 1))
...     yield 2
>>> g = f()
>>> next(g)
1
>>> g.send(42)
42
2

Sending a value into a new generator produces a TypeError:

>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator


Yield by itself yields None:

>>> def f(): yield
>>> list(f())
[None]


Yield is allowed only in the outermost iterable in generator expression:

>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<class 'generator'>


A yield expression with augmented assignment.

>>> def coroutine(seq):
...     count = 0
...     while count < 200:
...         count += yield
...         seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> next(c)
>>> print(seq)
[]
>>> c.send(10)
>>> print(seq)
[10]
>>> c.send(10)
>>> print(seq)
[10, 20]
>>> c.send(10)
>>> print(seq)
[10, 20, 30]


Check some syntax errors for yield expressions:

>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
...
SyntaxError: 'yield' outside function

>>> def f(): x = yield = y
Traceback (most recent call last):
...
SyntaxError: assignment to yield expression not possible

>>> def f(): (yield bar) = y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression

>>> def f(): (yield bar) += y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression


Now check some throw() conditions:

>>> def f():
...     while True:
...         try:
...             print((yield))
...         except ValueError as v:
...             print("caught ValueError (%s)" % (v))
>>> import sys
>>> g = f()
>>> next(g)

>>> g.throw(ValueError) # type only
caught ValueError ()

>>> g.throw(ValueError("xyz"))  # value only
caught ValueError (xyz)

>>> g.throw(ValueError, ValueError(1))   # value+matching type
caught ValueError (1)

>>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
caught ValueError (1)

>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
caught ValueError (1)

>>> g.throw(ValueError(1), "foo")       # bad args
Traceback (most recent call last):
...
TypeError: instance exception may not have a separate value

>>> g.throw(ValueError, "foo", 23)      # bad args
Traceback (most recent call last):
...
TypeError: throw() third argument must be a traceback object

>>> g.throw("abc")
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not str

>>> g.throw(0)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not int

>>> g.throw(list)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not type

>>> def throw(g,exc):
...     try:
...         raise exc
...     except:
...         g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()

>>> g.send(1)
1

>>> throw(g,TypeError)  # terminate the generator
Traceback (most recent call last):
...
TypeError

>>> print(g.gi_frame)
None

>>> g.send(2)
Traceback (most recent call last):
...
StopIteration

>>> g.throw(ValueError,6)       # throw on closed generator
Traceback (most recent call last):
...
ValueError: 6

>>> f().throw(ValueError,7)     # throw on just-opened generator
Traceback (most recent call last):
...
ValueError: 7

Plain "raise" inside a generator should preserve the traceback (#13188).
The traceback should have 3 levels:
- g.throw()
- f()
- 1/0

>>> def f():
...     try:
...         yield
...     except:
...         raise
>>> g = f()
>>> try:
...     1/0
... except ZeroDivisionError as v:
...     try:
...         g.throw(v)
...     except Exception as w:
...         tb = w.__traceback__
>>> levels = 0
>>> while tb:
...     levels += 1
...     tb = tb.tb_next
>>> levels
3

Now let's try closing a generator:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         print("exiting")

>>> g = f()
>>> next(g)
>>> g.close()
exiting
>>> g.close()  # should be no-op now

>>> f().close()  # close on just-opened generator should be fine

>>> def f(): yield      # an even simpler generator
>>> f().close()         # close before opening
>>> g = f()
>>> next(g)
>>> g.close()           # close normally

And finalization:

>>> def f():
...     try: yield
...     finally:
...         print("exiting")

>>> g = f()
>>> next(g)
>>> del g
exiting


GeneratorExit is not caught by except Exception:

>>> def f():
...     try: yield
...     except Exception:
...         print('except')
...     finally:
...         print('finally')

>>> g = f()
>>> next(g)
>>> del g
finally


Now let's try some ill-behaved generators:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         yield "foo!"
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
RuntimeError: generator ignored GeneratorExit
>>> g.close()


Our ill-behaved code should be invoked during GC:

>>> import sys, io
>>> old, sys.stderr = sys.stderr, io.StringIO()
>>> g = f()
>>> next(g)
>>> del g
>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue()
True
>>> sys.stderr = old


And errors thrown during closing should propagate:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         raise TypeError("fie!")
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
TypeError: fie!


Ensure that various yield expression constructs make their
enclosing function a generator:

>>> def f(): x += yield
>>> type(f())
<class 'generator'>

>>> def f(): x = yield
>>> type(f())
<class 'generator'>

>>> def f(): lambda x=(yield): 1
>>> type(f())
<class 'generator'>

>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<class 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]

"""

refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.

>>> import itertools
>>> def leak():
...     class gen:
...         def __iter__(self):
...             return self
...         def __next__(self):
...             return self.item
...     g = gen()
...     head, tail = itertools.tee(g)
...     g.item = head
...     return head
>>> it = leak()

Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.

>>> item = next(it)



This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.

>>> def leak():
...    def gen():
...        while True:
...            yield g
...    g = gen()

>>> leak()



This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.

>>> import sys, io
>>> old = sys.stderr
>>> try:
...     sys.stderr = io.StringIO()
...     class Leaker:
...         def __del__(self):
...             def invoke(message):
...                 raise RuntimeError(message)
...             invoke("test")
...
...     l = Leaker()
...     del l
...     err = sys.stderr.getvalue().strip()
...     "Exception ignored in" in err
...     "RuntimeError: test" in err
...     "Traceback" in err
...     "in invoke" in err
... finally:
...     sys.stderr = old
True
True
True
True


These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.

"""

# Doctest suites run by support.run_doctest() via test_main() below.
# tutorial_tests, pep_tests, email_tests and fun_tests are defined earlier
# in this file.
__test__ = {"tut":      tutorial_tests,
            "pep":      pep_tests,
            "email":    email_tests,
            "fun":      fun_tests,
            "syntax":   syntax_tests,
            "conjoin":  conjoin_tests,
            "weakref":  weakref_tests,
            "coroutine": coroutine_tests,
            "refleaks": refleaks_tests,
            }

# Magic test name that regrtest.py invokes *after* importing this module.
# This worms around a bootstrap problem.
# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
    """Run the unittest classes, then every suite in __test__."""
    from test import support, test_generators
    support.run_unittest(__name__)
    support.run_doctest(test_generators, verbose)

# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
    test_main(1)