# test_generators doctest data -- tests for Python generators (PEP 255).
      1 tutorial_tests = """
      2 Let's try a simple generator:
      3 
      4     >>> def f():
      5     ...    yield 1
      6     ...    yield 2
      7 
      8     >>> for i in f():
      9     ...     print i
     10     1
     11     2
     12     >>> g = f()
     13     >>> g.next()
     14     1
     15     >>> g.next()
     16     2
     17 
     18 "Falling off the end" stops the generator:
     19 
     20     >>> g.next()
     21     Traceback (most recent call last):
     22       File "<stdin>", line 1, in ?
     23       File "<stdin>", line 2, in g
     24     StopIteration
     25 
     26 "return" also stops the generator:
     27 
     28     >>> def f():
     29     ...     yield 1
     30     ...     return
     31     ...     yield 2 # never reached
     32     ...
     33     >>> g = f()
     34     >>> g.next()
     35     1
     36     >>> g.next()
     37     Traceback (most recent call last):
     38       File "<stdin>", line 1, in ?
     39       File "<stdin>", line 3, in f
     40     StopIteration
     41     >>> g.next() # once stopped, can't be resumed
     42     Traceback (most recent call last):
     43       File "<stdin>", line 1, in ?
     44     StopIteration
     45 
     46 "raise StopIteration" stops the generator too:
     47 
     48     >>> def f():
     49     ...     yield 1
     50     ...     raise StopIteration
     51     ...     yield 2 # never reached
     52     ...
     53     >>> g = f()
     54     >>> g.next()
     55     1
     56     >>> g.next()
     57     Traceback (most recent call last):
     58       File "<stdin>", line 1, in ?
     59     StopIteration
     60     >>> g.next()
     61     Traceback (most recent call last):
     62       File "<stdin>", line 1, in ?
     63     StopIteration
     64 
     65 However, they are not exactly equivalent:
     66 
     67     >>> def g1():
     68     ...     try:
     69     ...         return
     70     ...     except:
     71     ...         yield 1
     72     ...
     73     >>> list(g1())
     74     []
     75 
     76     >>> def g2():
     77     ...     try:
     78     ...         raise StopIteration
     79     ...     except:
     80     ...         yield 42
     81     >>> print list(g2())
     82     [42]
     83 
     84 This may be surprising at first:
     85 
     86     >>> def g3():
     87     ...     try:
     88     ...         return
     89     ...     finally:
     90     ...         yield 1
     91     ...
     92     >>> list(g3())
     93     [1]
     94 
     95 Let's create an alternate range() function implemented as a generator:
     96 
     97     >>> def yrange(n):
     98     ...     for i in range(n):
     99     ...         yield i
    100     ...
    101     >>> list(yrange(5))
    102     [0, 1, 2, 3, 4]
    103 
    104 Generators always return to the most recent caller:
    105 
    106     >>> def creator():
    107     ...     r = yrange(5)
    108     ...     print "creator", r.next()
    109     ...     return r
    110     ...
    111     >>> def caller():
    112     ...     r = creator()
    113     ...     for i in r:
    114     ...             print "caller", i
    115     ...
    116     >>> caller()
    117     creator 0
    118     caller 1
    119     caller 2
    120     caller 3
    121     caller 4
    122 
    123 Generators can call other generators:
    124 
    125     >>> def zrange(n):
    126     ...     for i in yrange(n):
    127     ...         yield i
    128     ...
    129     >>> list(zrange(5))
    130     [0, 1, 2, 3, 4]
    131 
    132 """

# The examples from PEP 255.

    136 pep_tests = """
    137 
    138 Specification:  Yield
    139 
    140     Restriction:  A generator cannot be resumed while it is actively
    141     running:
    142 
    143     >>> def g():
    144     ...     i = me.next()
    145     ...     yield i
    146     >>> me = g()
    147     >>> me.next()
    148     Traceback (most recent call last):
    149      ...
    150       File "<string>", line 2, in g
    151     ValueError: generator already executing
    152 
    153 Specification: Return
    154 
    155     Note that return isn't always equivalent to raising StopIteration:  the
    156     difference lies in how enclosing try/except constructs are treated.
    157     For example,
    158 
    159         >>> def f1():
    160         ...     try:
    161         ...         return
    162         ...     except:
    163         ...        yield 1
    164         >>> print list(f1())
    165         []
    166 
    167     because, as in any function, return simply exits, but
    168 
    169         >>> def f2():
    170         ...     try:
    171         ...         raise StopIteration
    172         ...     except:
    173         ...         yield 42
    174         >>> print list(f2())
    175         [42]
    176 
    177     because StopIteration is captured by a bare "except", as is any
    178     exception.
    179 
    180 Specification: Generators and Exception Propagation
    181 
    182     >>> def f():
    183     ...     return 1//0
    184     >>> def g():
    185     ...     yield f()  # the zero division exception propagates
    186     ...     yield 42   # and we'll never get here
    187     >>> k = g()
    188     >>> k.next()
    189     Traceback (most recent call last):
    190       File "<stdin>", line 1, in ?
    191       File "<stdin>", line 2, in g
    192       File "<stdin>", line 2, in f
    193     ZeroDivisionError: integer division or modulo by zero
    194     >>> k.next()  # and the generator cannot be resumed
    195     Traceback (most recent call last):
    196       File "<stdin>", line 1, in ?
    197     StopIteration
    198     >>>
    199 
    200 Specification: Try/Except/Finally
    201 
    202     >>> def f():
    203     ...     try:
    204     ...         yield 1
    205     ...         try:
    206     ...             yield 2
    207     ...             1//0
    208     ...             yield 3  # never get here
    209     ...         except ZeroDivisionError:
    210     ...             yield 4
    211     ...             yield 5
    212     ...             raise
    213     ...         except:
    214     ...             yield 6
    215     ...         yield 7     # the "raise" above stops this
    216     ...     except:
    217     ...         yield 8
    218     ...     yield 9
    219     ...     try:
    220     ...         x = 12
    221     ...     finally:
    222     ...         yield 10
    223     ...     yield 11
    224     >>> print list(f())
    225     [1, 2, 4, 5, 8, 9, 10, 11]
    226     >>>
    227 
    228 Guido's binary tree example.
    229 
    230     >>> # A binary tree class.
    231     >>> class Tree:
    232     ...
    233     ...     def __init__(self, label, left=None, right=None):
    234     ...         self.label = label
    235     ...         self.left = left
    236     ...         self.right = right
    237     ...
    238     ...     def __repr__(self, level=0, indent="    "):
    239     ...         s = level*indent + repr(self.label)
    240     ...         if self.left:
    241     ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
    242     ...         if self.right:
    243     ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
    244     ...         return s
    245     ...
    246     ...     def __iter__(self):
    247     ...         return inorder(self)
    248 
    249     >>> # Create a Tree from a list.
    250     >>> def tree(list):
    251     ...     n = len(list)
    252     ...     if n == 0:
    253     ...         return []
    254     ...     i = n // 2
    255     ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))
    256 
    257     >>> # Show it off: create a tree.
    258     >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
    259 
    260     >>> # A recursive generator that generates Tree labels in in-order.
    261     >>> def inorder(t):
    262     ...     if t:
    263     ...         for x in inorder(t.left):
    264     ...             yield x
    265     ...         yield t.label
    266     ...         for x in inorder(t.right):
    267     ...             yield x
    268 
    269     >>> # Show it off: create a tree.
    270     >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
    271     >>> # Print the nodes of the tree in in-order.
    272     >>> for x in t:
    273     ...     print x,
    274     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
    275 
    276     >>> # A non-recursive generator.
    277     >>> def inorder(node):
    278     ...     stack = []
    279     ...     while node:
    280     ...         while node.left:
    281     ...             stack.append(node)
    282     ...             node = node.left
    283     ...         yield node.label
    284     ...         while not node.right:
    285     ...             try:
    286     ...                 node = stack.pop()
    287     ...             except IndexError:
    288     ...                 return
    289     ...             yield node.label
    290     ...         node = node.right
    291 
    292     >>> # Exercise the non-recursive generator.
    293     >>> for x in t:
    294     ...     print x,
    295     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
    296 
    297 """

# Examples from Iterator-List and Python-Dev and c.l.py.

    301 email_tests = """
    302 
    303 The difference between yielding None and returning it.
    304 
    305 >>> def g():
    306 ...     for i in range(3):
    307 ...         yield None
    308 ...     yield None
    309 ...     return
    310 >>> list(g())
    311 [None, None, None, None]
    312 
    313 Ensure that explicitly raising StopIteration acts like any other exception
    314 in try/except, not like a return.
    315 
    316 >>> def g():
    317 ...     yield 1
    318 ...     try:
    319 ...         raise StopIteration
    320 ...     except:
    321 ...         yield 2
    322 ...     yield 3
    323 >>> list(g())
    324 [1, 2, 3]
    325 
    326 Next one was posted to c.l.py.
    327 
    328 >>> def gcomb(x, k):
    329 ...     "Generate all combinations of k elements from list x."
    330 ...
    331 ...     if k > len(x):
    332 ...         return
    333 ...     if k == 0:
    334 ...         yield []
    335 ...     else:
    336 ...         first, rest = x[0], x[1:]
    337 ...         # A combination does or doesn't contain first.
    338 ...         # If it does, the remainder is a k-1 comb of rest.
    339 ...         for c in gcomb(rest, k-1):
    340 ...             c.insert(0, first)
    341 ...             yield c
    342 ...         # If it doesn't contain first, it's a k comb of rest.
    343 ...         for c in gcomb(rest, k):
    344 ...             yield c
    345 
    346 >>> seq = range(1, 5)
    347 >>> for k in range(len(seq) + 2):
    348 ...     print "%d-combs of %s:" % (k, seq)
    349 ...     for c in gcomb(seq, k):
    350 ...         print "   ", c
    351 0-combs of [1, 2, 3, 4]:
    352     []
    353 1-combs of [1, 2, 3, 4]:
    354     [1]
    355     [2]
    356     [3]
    357     [4]
    358 2-combs of [1, 2, 3, 4]:
    359     [1, 2]
    360     [1, 3]
    361     [1, 4]
    362     [2, 3]
    363     [2, 4]
    364     [3, 4]
    365 3-combs of [1, 2, 3, 4]:
    366     [1, 2, 3]
    367     [1, 2, 4]
    368     [1, 3, 4]
    369     [2, 3, 4]
    370 4-combs of [1, 2, 3, 4]:
    371     [1, 2, 3, 4]
    372 5-combs of [1, 2, 3, 4]:
    373 
    374 From the Iterators list, about the types of these things.
    375 
    376 >>> def g():
    377 ...     yield 1
    378 ...
    379 >>> type(g)
    380 <type 'function'>
    381 >>> i = g()
    382 >>> type(i)
    383 <type 'generator'>
    384 >>> [s for s in dir(i) if not s.startswith('_')]
    385 ['close', 'gi_code', 'gi_frame', 'gi_running', 'next', 'send', 'throw']
    386 >>> print i.next.__doc__
    387 x.next() -> the next value, or raise StopIteration
    388 >>> iter(i) is i
    389 True
    390 >>> import types
    391 >>> isinstance(i, types.GeneratorType)
    392 True
    393 
    394 And more, added later.
    395 
    396 >>> i.gi_running
    397 0
    398 >>> type(i.gi_frame)
    399 <type 'frame'>
    400 >>> i.gi_running = 42
    401 Traceback (most recent call last):
    402   ...
    403 TypeError: readonly attribute
    404 >>> def g():
    405 ...     yield me.gi_running
    406 >>> me = g()
    407 >>> me.gi_running
    408 0
    409 >>> me.next()
    410 1
    411 >>> me.gi_running
    412 0
    413 
    414 A clever union-find implementation from c.l.py, due to David Eppstein.
    415 Sent: Friday, June 29, 2001 12:16 PM
    416 To: python-list (at] python.org
    417 Subject: Re: PEP 255: Simple Generators
    418 
    419 >>> class disjointSet:
    420 ...     def __init__(self, name):
    421 ...         self.name = name
    422 ...         self.parent = None
    423 ...         self.generator = self.generate()
    424 ...
    425 ...     def generate(self):
    426 ...         while not self.parent:
    427 ...             yield self
    428 ...         for x in self.parent.generator:
    429 ...             yield x
    430 ...
    431 ...     def find(self):
    432 ...         return self.generator.next()
    433 ...
    434 ...     def union(self, parent):
    435 ...         if self.parent:
    436 ...             raise ValueError("Sorry, I'm not a root!")
    437 ...         self.parent = parent
    438 ...
    439 ...     def __str__(self):
    440 ...         return self.name
    441 
    442 >>> names = "ABCDEFGHIJKLM"
    443 >>> sets = [disjointSet(name) for name in names]
    444 >>> roots = sets[:]
    445 
    446 >>> import random
    447 >>> gen = random.WichmannHill(42)
    448 >>> while 1:
    449 ...     for s in sets:
    450 ...         print "%s->%s" % (s, s.find()),
    451 ...     print
    452 ...     if len(roots) > 1:
    453 ...         s1 = gen.choice(roots)
    454 ...         roots.remove(s1)
    455 ...         s2 = gen.choice(roots)
    456 ...         s1.union(s2)
    457 ...         print "merged", s1, "into", s2
    458 ...     else:
    459 ...         break
    460 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
    461 merged D into G
    462 A->A B->B C->C D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M
    463 merged C into F
    464 A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M
    465 merged L into A
    466 A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->A M->M
    467 merged H into E
    468 A->A B->B C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M
    469 merged B into E
    470 A->A B->E C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M
    471 merged J into G
    472 A->A B->E C->F D->G E->E F->F G->G H->E I->I J->G K->K L->A M->M
    473 merged E into G
    474 A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->M
    475 merged M into G
    476 A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->G
    477 merged I into K
    478 A->A B->G C->F D->G E->G F->F G->G H->G I->K J->G K->K L->A M->G
    479 merged K into A
    480 A->A B->G C->F D->G E->G F->F G->G H->G I->A J->G K->A L->A M->G
    481 merged F into A
    482 A->A B->G C->A D->G E->G F->A G->G H->G I->A J->G K->A L->A M->G
    483 merged A into G
    484 A->G B->G C->G D->G E->G F->G G->G H->G I->G J->G K->G L->G M->G
    485 
    486 """
# Emacs turd '

# Fun tests (for sufficiently warped notions of "fun").

    491 fun_tests = """
    492 
    493 Build up to a recursive Sieve of Eratosthenes generator.
    494 
    495 >>> def firstn(g, n):
    496 ...     return [g.next() for i in range(n)]
    497 
    498 >>> def intsfrom(i):
    499 ...     while 1:
    500 ...         yield i
    501 ...         i += 1
    502 
    503 >>> firstn(intsfrom(5), 7)
    504 [5, 6, 7, 8, 9, 10, 11]
    505 
    506 >>> def exclude_multiples(n, ints):
    507 ...     for i in ints:
    508 ...         if i % n:
    509 ...             yield i
    510 
    511 >>> firstn(exclude_multiples(3, intsfrom(1)), 6)
    512 [1, 2, 4, 5, 7, 8]
    513 
    514 >>> def sieve(ints):
    515 ...     prime = ints.next()
    516 ...     yield prime
    517 ...     not_divisible_by_prime = exclude_multiples(prime, ints)
    518 ...     for p in sieve(not_divisible_by_prime):
    519 ...         yield p
    520 
    521 >>> primes = sieve(intsfrom(2))
    522 >>> firstn(primes, 20)
    523 [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
    524 
    525 
    526 Another famous problem:  generate all integers of the form
    527     2**i * 3**j  * 5**k
    528 in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
    529 Try writing it without generators, and correctly, and without generating
    530 3 internal results for each result output.
    531 
    532 >>> def times(n, g):
    533 ...     for i in g:
    534 ...         yield n * i
    535 >>> firstn(times(10, intsfrom(1)), 10)
    536 [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
    537 
    538 >>> def merge(g, h):
    539 ...     ng = g.next()
    540 ...     nh = h.next()
    541 ...     while 1:
    542 ...         if ng < nh:
    543 ...             yield ng
    544 ...             ng = g.next()
    545 ...         elif ng > nh:
    546 ...             yield nh
    547 ...             nh = h.next()
    548 ...         else:
    549 ...             yield ng
    550 ...             ng = g.next()
    551 ...             nh = h.next()
    552 
    553 The following works, but is doing a whale of a lot of redundant work --
    554 it's not clear how to get the internal uses of m235 to share a single
    555 generator.  Note that me_times2 (etc) each need to see every element in the
    556 result sequence.  So this is an example where lazy lists are more natural
    557 (you can look at the head of a lazy list any number of times).
    558 
    559 >>> def m235():
    560 ...     yield 1
    561 ...     me_times2 = times(2, m235())
    562 ...     me_times3 = times(3, m235())
    563 ...     me_times5 = times(5, m235())
    564 ...     for i in merge(merge(me_times2,
    565 ...                          me_times3),
    566 ...                    me_times5):
    567 ...         yield i
    568 
    569 Don't print "too many" of these -- the implementation above is extremely
    570 inefficient:  each call of m235() leads to 3 recursive calls, and in
    571 turn each of those 3 more, and so on, and so on, until we've descended
    572 enough levels to satisfy the print stmts.  Very odd:  when I printed 5
    573 lines of results below, this managed to screw up Win98's malloc in "the
    574 usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
    575 address space, and it *looked* like a very slow leak.
    576 
    577 >>> result = m235()
    578 >>> for i in range(3):
    579 ...     print firstn(result, 15)
    580 [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
    581 [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
    582 [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
    583 
    584 Heh.  Here's one way to get a shared list, complete with an excruciating
    585 namespace renaming trick.  The *pretty* part is that the times() and merge()
    586 functions can be reused as-is, because they only assume their stream
    587 arguments are iterable -- a LazyList is the same as a generator to times().
    588 
    589 >>> class LazyList:
    590 ...     def __init__(self, g):
    591 ...         self.sofar = []
    592 ...         self.fetch = g.next
    593 ...
    594 ...     def __getitem__(self, i):
    595 ...         sofar, fetch = self.sofar, self.fetch
    596 ...         while i >= len(sofar):
    597 ...             sofar.append(fetch())
    598 ...         return sofar[i]
    599 
    600 >>> def m235():
    601 ...     yield 1
    602 ...     # Gack:  m235 below actually refers to a LazyList.
    603 ...     me_times2 = times(2, m235)
    604 ...     me_times3 = times(3, m235)
    605 ...     me_times5 = times(5, m235)
    606 ...     for i in merge(merge(me_times2,
    607 ...                          me_times3),
    608 ...                    me_times5):
    609 ...         yield i
    610 
    611 Print as many of these as you like -- *this* implementation is memory-
    612 efficient.
    613 
    614 >>> m235 = LazyList(m235())
    615 >>> for i in range(5):
    616 ...     print [m235[j] for j in range(15*i, 15*(i+1))]
    617 [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
    618 [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
    619 [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
    620 [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
    621 [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
    622 
    623 Ye olde Fibonacci generator, LazyList style.
    624 
    625 >>> def fibgen(a, b):
    626 ...
    627 ...     def sum(g, h):
    628 ...         while 1:
    629 ...             yield g.next() + h.next()
    630 ...
    631 ...     def tail(g):
    632 ...         g.next()    # throw first away
    633 ...         for x in g:
    634 ...             yield x
    635 ...
    636 ...     yield a
    637 ...     yield b
    638 ...     for s in sum(iter(fib),
    639 ...                  tail(iter(fib))):
    640 ...         yield s
    641 
    642 >>> fib = LazyList(fibgen(1, 2))
    643 >>> firstn(iter(fib), 17)
    644 [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
    645 
    646 
    647 Running after your tail with itertools.tee (new in version 2.4)
    648 
    649 The algorithms "m235" (Hamming) and Fibonacci presented above are both
    650 examples of a whole family of FP (functional programming) algorithms
    651 where a function produces and returns a list while the production algorithm
    652 suppose the list as already produced by recursively calling itself.
    653 For these algorithms to work, they must:
    654 
    655 - produce at least a first element without presupposing the existence of
    656   the rest of the list
    657 - produce their elements in a lazy manner
    658 
    659 To work efficiently, the beginning of the list must not be recomputed over
    660 and over again. This is ensured in most FP languages as a built-in feature.
    661 In python, we have to explicitly maintain a list of already computed results
    662 and abandon genuine recursivity.
    663 
    664 This is what had been attempted above with the LazyList class. One problem
    665 with that class is that it keeps a list of all of the generated results and
    666 therefore continually grows. This partially defeats the goal of the generator
    667 concept, viz. produce the results only as needed instead of producing them
    668 all and thereby wasting memory.
    669 
    670 Thanks to itertools.tee, it is now clear "how to get the internal uses of
    671 m235 to share a single generator".
    672 
    673 >>> from itertools import tee
    674 >>> def m235():
    675 ...     def _m235():
    676 ...         yield 1
    677 ...         for n in merge(times(2, m2),
    678 ...                        merge(times(3, m3),
    679 ...                              times(5, m5))):
    680 ...             yield n
    681 ...     m1 = _m235()
    682 ...     m2, m3, m5, mRes = tee(m1, 4)
    683 ...     return mRes
    684 
    685 >>> it = m235()
    686 >>> for i in range(5):
    687 ...     print firstn(it, 15)
    688 [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
    689 [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
    690 [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
    691 [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
    692 [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
    693 
    694 The "tee" function does just what we want. It internally keeps a generated
    695 result for as long as it has not been "consumed" from all of the duplicated
    696 iterators, whereupon it is deleted. You can therefore print the hamming
    697 sequence during hours without increasing memory usage, or very little.
    698 
    699 The beauty of it is that recursive running-after-their-tail FP algorithms
    700 are quite straightforwardly expressed with this Python idiom.
    701 
    702 Ye olde Fibonacci generator, tee style.
    703 
    704 >>> def fib():
    705 ...
    706 ...     def _isum(g, h):
    707 ...         while 1:
    708 ...             yield g.next() + h.next()
    709 ...
    710 ...     def _fib():
    711 ...         yield 1
    712 ...         yield 2
    713 ...         fibTail.next() # throw first away
    714 ...         for res in _isum(fibHead, fibTail):
    715 ...             yield res
    716 ...
    717 ...     realfib = _fib()
    718 ...     fibHead, fibTail, fibRes = tee(realfib, 3)
    719 ...     return fibRes
    720 
    721 >>> firstn(fib(), 17)
    722 [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
    723 
    724 """

# syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
# hackery.

    729 syntax_tests = """
    730 
    731 >>> def f():
    732 ...     return 22
    733 ...     yield 1
    734 Traceback (most recent call last):
    735   ..
    736 SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[0]>, line 3)
    737 
    738 >>> def f():
    739 ...     yield 1
    740 ...     return 22
    741 Traceback (most recent call last):
    742   ..
    743 SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[1]>, line 3)
    744 
    745 "return None" is not the same as "return" in a generator:
    746 
    747 >>> def f():
    748 ...     yield 1
    749 ...     return None
    750 Traceback (most recent call last):
    751   ..
    752 SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[2]>, line 3)
    753 
    754 These are fine:
    755 
    756 >>> def f():
    757 ...     yield 1
    758 ...     return
    759 
    760 >>> def f():
    761 ...     try:
    762 ...         yield 1
    763 ...     finally:
    764 ...         pass
    765 
    766 >>> def f():
    767 ...     try:
    768 ...         try:
    769 ...             1//0
    770 ...         except ZeroDivisionError:
    771 ...             yield 666
    772 ...         except:
    773 ...             pass
    774 ...     finally:
    775 ...         pass
    776 
    777 >>> def f():
    778 ...     try:
    779 ...         try:
    780 ...             yield 12
    781 ...             1//0
    782 ...         except ZeroDivisionError:
    783 ...             yield 666
    784 ...         except:
    785 ...             try:
    786 ...                 x = 12
    787 ...             finally:
    788 ...                 yield 12
    789 ...     except:
    790 ...         return
    791 >>> list(f())
    792 [12, 666]
    793 
    794 >>> def f():
    795 ...    yield
    796 >>> type(f())
    797 <type 'generator'>
    798 
    799 
    800 >>> def f():
    801 ...    if 0:
    802 ...        yield
    803 >>> type(f())
    804 <type 'generator'>
    805 
    806 
    807 >>> def f():
    808 ...     if 0:
    809 ...         yield 1
    810 >>> type(f())
    811 <type 'generator'>
    812 
    813 >>> def f():
    814 ...    if "":
    815 ...        yield None
    816 >>> type(f())
    817 <type 'generator'>
    818 
    819 >>> def f():
    820 ...     return
    821 ...     try:
    822 ...         if x==4:
    823 ...             pass
    824 ...         elif 0:
    825 ...             try:
    826 ...                 1//0
    827 ...             except SyntaxError:
    828 ...                 pass
    829 ...             else:
    830 ...                 if 0:
    831 ...                     while 12:
    832 ...                         x += 1
    833 ...                         yield 2 # don't blink
    834 ...                         f(a, b, c, d, e)
    835 ...         else:
    836 ...             pass
    837 ...     except:
    838 ...         x = 1
    839 ...     return
    840 >>> type(f())
    841 <type 'generator'>
    842 
    843 >>> def f():
    844 ...     if 0:
    845 ...         def g():
    846 ...             yield 1
    847 ...
    848 >>> type(f())
    849 <type 'NoneType'>
    850 
    851 >>> def f():
    852 ...     if 0:
    853 ...         class C:
    854 ...             def __init__(self):
    855 ...                 yield 1
    856 ...             def f(self):
    857 ...                 yield 2
    858 >>> type(f())
    859 <type 'NoneType'>
    860 
    861 >>> def f():
    862 ...     if 0:
    863 ...         return
    864 ...     if 0:
    865 ...         yield 2
    866 >>> type(f())
    867 <type 'generator'>
    868 
    869 
    870 >>> def f():
    871 ...     if 0:
    872 ...         lambda x:  x        # shouldn't trigger here
    873 ...         return              # or here
    874 ...         def f(i):
    875 ...             return 2*i      # or here
    876 ...         if 0:
    877 ...             return 3        # but *this* sucks (line 8)
    878 ...     if 0:
    879 ...         yield 2             # because it's a generator (line 10)
    880 Traceback (most recent call last):
    881 SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[24]>, line 10)
    882 
    883 This one caused a crash (see SF bug 567538):
    884 
    885 >>> def f():
    886 ...     for i in range(3):
    887 ...         try:
    888 ...             continue
    889 ...         finally:
    890 ...             yield i
    891 ...
    892 >>> g = f()
    893 >>> print g.next()
    894 0
    895 >>> print g.next()
    896 1
    897 >>> print g.next()
    898 2
    899 >>> print g.next()
    900 Traceback (most recent call last):
    901 StopIteration
    902 
    903 
    904 Test the gi_code attribute
    905 
    906 >>> def f():
    907 ...     yield 5
    908 ...
    909 >>> g = f()
    910 >>> g.gi_code is f.func_code
    911 True
    912 >>> g.next()
    913 5
    914 >>> g.next()
    915 Traceback (most recent call last):
    916 StopIteration
    917 >>> g.gi_code is f.func_code
    918 True
    919 
    920 
    921 Test the __name__ attribute and the repr()
    922 
    923 >>> def f():
    924 ...    yield 5
    925 ...
    926 >>> g = f()
    927 >>> g.__name__
    928 'f'
    929 >>> repr(g)  # doctest: +ELLIPSIS
    930 '<generator object f at ...>'
    931 
    932 Lambdas shouldn't have their usual return behavior.
    933 
    934 >>> x = lambda: (yield 1)
    935 >>> list(x())
    936 [1]
    937 
    938 >>> x = lambda: ((yield 1), (yield 2))
    939 >>> list(x())
    940 [1, 2]
    941 """

# conjoin is a simple backtracking generator, named in honor of Icon's
# "conjunction" control structure.  Pass a list of no-argument functions
# that return iterable objects.  Easiest to explain by example:  assume the
# function list [x, y, z] is passed.  Then conjoin acts like:
#
# def g():
#     values = [None] * 3
#     for values[0] in x():
#         for values[1] in y():
#             for values[2] in z():
#                 yield values
#
# So some 3-lists of values *may* be generated, each time we successfully
# get into the innermost loop.  If an iterator fails (is exhausted) before
# then, it "backtracks" to get the next value from the nearest enclosing
# iterator (the one "to the left"), and starts all over again at the next
# slot (pumps a fresh iterator).  Of course this is most useful when the
# iterators have side-effects, so that which values *can* be generated at
# each slot depend on the values iterated at previous slots.

    963 def simple_conjoin(gs):
    964 
    965     values = [None] * len(gs)
    966 
    967     def gen(i):
    968         if i >= len(gs):
    969             yield values
    970         else:
    971             for values[i] in gs[i]():
    972                 for x in gen(i+1):
    973                     yield x
    974 
    975     for x in gen(0):
    976         yield x
    977 
    978 # That works fine, but recursing a level and checking i against len(gs) for

    979 # each item produced is inefficient.  By doing manual loop unrolling across

    980 # generator boundaries, it's possible to eliminate most of that overhead.

    981 # This isn't worth the bother *in general* for generators, but conjoin() is

    982 # a core building block for some CPU-intensive generator applications.

    983 
def conjoin(gs):
    """Backtracking cross-product generator, like simple_conjoin but with
    the recursion manually unrolled three loop nests at a time to cut the
    per-item generator-delegation overhead."""

    n = len(gs)
    values = [None] * n  # shared result list, mutated in place and re-yielded

    # Do one loop nest at time recursively, until the # of loop nests
    # remaining is divisible by 3.

    def gen(i):
        if i >= n:
            yield values

        elif (n-i) % 3:
            ip1 = i+1
            for values[i] in gs[i]():
                for x in gen(ip1):
                    yield x

        else:
            # Remaining nest count is an exact multiple of 3:  hand off
            # to the unrolled worker.
            for x in _gen3(i):
                yield x

    # Do three loop nests at a time, recursing only if at least three more
    # remain.  Don't call directly:  this is an internal optimization for
    # gen's use.

    def _gen3(i):
        assert i < n and (n-i) % 3 == 0
        ip1, ip2, ip3 = i+1, i+2, i+3
        g, g1, g2 = gs[i : ip3]

        if ip3 >= n:
            # These are the last three, so we can yield values directly.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        yield values

        else:
            # At least 6 loop nests remain; peel off 3 and recurse for the
            # rest.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        for x in _gen3(ip3):
                            yield x

    for x in gen(0):
        yield x
   1033 
   1034 # And one more approach:  For backtracking apps like the Knight's Tour

   1035 # solver below, the number of backtracking levels can be enormous (one

   1036 # level per square, for the Knight's Tour, so that e.g. a 100x100 board

   1037 # needs 10,000 levels).  In such cases Python is likely to run out of

   1038 # stack space due to recursion.  So here's a recursion-free version of

   1039 # conjoin too.

   1040 # NOTE WELL:  This allows large problems to be solved with only trivial

   1041 # demands on stack space.  Without explicitly resumable generators, this is

   1042 # much harder to achieve.  OTOH, this is much slower (up to a factor of 2)

   1043 # than the fancy unrolled recursive conjoin.

   1044 
def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    # Iterative conjoin:  simulates the nest of loops with explicit
    # iterator/value arrays indexed by i, so Python-frame depth stays
    # constant no matter how many backtracking levels there are.
    n = len(gs)
    values = [None] * n
    iters  = [None] * n
    _StopIteration = StopIteration  # make local because caught a *lot*
    i = 0
    while 1:
        # Descend:  pump a fresh iterator into each remaining slot.
        try:
            while i < n:
                it = iters[i] = gs[i]().next
                values[i] = it()
                i += 1
        except _StopIteration:
            pass
        else:
            # Got all the way down:  a full assignment is ready.
            assert i == n
            yield values

        # Backtrack until an older iterator can be resumed.
        i -= 1
        while i >= 0:
            try:
                values[i] = iters[i]()
                # Success!  Start fresh at next level.
                i += 1
                break
            except _StopIteration:
                # Continue backtracking.
                i -= 1
        else:
            # Even slot 0 is exhausted:  we're done.
            assert i < 0
            break
   1078 
   1079 # A conjoin-based N-Queens solver.

   1080 
class Queens:
    """Conjoin-based N-Queens solver:  one backtracking level per row."""

    def __init__(self, n):
        self.n = n
        rangen = range(n)

        # Assign a unique int to each column and diagonal.
        # columns:  n of those, range(n).
        # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
        # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
        # based.
        # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
        # each, smallest i+j is 0, largest is 2n-2.

        # For each square, compute a bit vector of the columns and
        # diagonals it covers, and for each row compute a function that
        # generates the possibilities for the columns in that row.
        self.rowgenerators = []
        for i in rangen:
            rowuses = [(1L << j) |                  # column ordinal
                       (1L << (n + i-j + n-1)) |    # NW-SE ordinal
                       (1L << (n + 2*n-1 + i+j))    # NE-SW ordinal
                            for j in rangen]

            def rowgen(rowuses=rowuses):
                # Yield each non-conflicting column j for this row; claim
                # the square's column/diagonal bits in self.used while the
                # generator is suspended, release them on resumption.
                for j in rangen:
                    uses = rowuses[j]
                    if uses & self.used == 0:
                        self.used |= uses
                        yield j
                        self.used &= ~uses

            self.rowgenerators.append(rowgen)

    # Generate solutions.
    def solve(self):
        # self.used is the shared bit vector of occupied columns/diagonals.
        self.used = 0
        for row2col in conjoin(self.rowgenerators):
            yield row2col

    def printsolution(self, row2col):
        # Pretty-print one solution as an ASCII board; row2col[i] is the
        # column holding the queen in row i.
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print sep
        for i in range(n):
            squares = [" " for j in range(n)]
            squares[row2col[i]] = "Q"
            print "|" + "|".join(squares) + "|"
            print sep
   1130 
   1131 # A conjoin-based Knight's Tour solver.  This is pretty sophisticated

   1132 # (e.g., when used with flat_conjoin above, and passing hard=1 to the

   1133 # constructor, a 200x200 Knight's Tour was found quickly -- note that we're

   1134 # creating 10s of thousands of generators then!), and is lengthy.

   1135 
class Knights:
    """Conjoin-based closed Knight's Tour solver on an m x n board.

    One backtracking level per square; pass hard=1 to use the more
    expensive move-ordering heuristic (advance_hard).
    """

    def __init__(self, m, n, hard=0):
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this  = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 thru m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate moves 3 thru m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        if m*n < 4:
            self.squaregenerators = [first]
        else:
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        # Map board coordinates (row i, column j) to a flat square index.
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        # Inverse of coords2index:  flat index -> (row, column).
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        # (Re)build succs:  for each square, the list of squares a knight
        # can reach from it in one move.
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index

        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                                     if 0 <= i+io < m and
                                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        # ASCII-art one tour:  each square shows the move number on which
        # the knight visits it.
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print sep
        for i in range(m):
            row = squares[i]
            print "|" + "|".join(row) + "|"
            print sep
   1326 
   1327 conjoin_tests = """
   1328 
   1329 Generate the 3-bit binary numbers in order.  This illustrates dumbest-
   1330 possible use of conjoin, just to generate the full cross-product.
   1331 
   1332 >>> for c in conjoin([lambda: iter((0, 1))] * 3):
   1333 ...     print c
   1334 [0, 0, 0]
   1335 [0, 0, 1]
   1336 [0, 1, 0]
   1337 [0, 1, 1]
   1338 [1, 0, 0]
   1339 [1, 0, 1]
   1340 [1, 1, 0]
   1341 [1, 1, 1]
   1342 
   1343 For efficiency in typical backtracking apps, conjoin() yields the same list
   1344 object each time.  So if you want to save away a full account of its
   1345 generated sequence, you need to copy its results.
   1346 
   1347 >>> def gencopy(iterator):
   1348 ...     for x in iterator:
   1349 ...         yield x[:]
   1350 
   1351 >>> for n in range(10):
   1352 ...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
   1353 ...     print n, len(all), all[0] == [0] * n, all[-1] == [1] * n
   1354 0 1 True True
   1355 1 2 True True
   1356 2 4 True True
   1357 3 8 True True
   1358 4 16 True True
   1359 5 32 True True
   1360 6 64 True True
   1361 7 128 True True
   1362 8 256 True True
   1363 9 512 True True
   1364 
   1365 And run an 8-queens solver.
   1366 
   1367 >>> q = Queens(8)
   1368 >>> LIMIT = 2
   1369 >>> count = 0
   1370 >>> for row2col in q.solve():
   1371 ...     count += 1
   1372 ...     if count <= LIMIT:
   1373 ...         print "Solution", count
   1374 ...         q.printsolution(row2col)
   1375 Solution 1
   1376 +-+-+-+-+-+-+-+-+
   1377 |Q| | | | | | | |
   1378 +-+-+-+-+-+-+-+-+
   1379 | | | | |Q| | | |
   1380 +-+-+-+-+-+-+-+-+
   1381 | | | | | | | |Q|
   1382 +-+-+-+-+-+-+-+-+
   1383 | | | | | |Q| | |
   1384 +-+-+-+-+-+-+-+-+
   1385 | | |Q| | | | | |
   1386 +-+-+-+-+-+-+-+-+
   1387 | | | | | | |Q| |
   1388 +-+-+-+-+-+-+-+-+
   1389 | |Q| | | | | | |
   1390 +-+-+-+-+-+-+-+-+
   1391 | | | |Q| | | | |
   1392 +-+-+-+-+-+-+-+-+
   1393 Solution 2
   1394 +-+-+-+-+-+-+-+-+
   1395 |Q| | | | | | | |
   1396 +-+-+-+-+-+-+-+-+
   1397 | | | | | |Q| | |
   1398 +-+-+-+-+-+-+-+-+
   1399 | | | | | | | |Q|
   1400 +-+-+-+-+-+-+-+-+
   1401 | | |Q| | | | | |
   1402 +-+-+-+-+-+-+-+-+
   1403 | | | | | | |Q| |
   1404 +-+-+-+-+-+-+-+-+
   1405 | | | |Q| | | | |
   1406 +-+-+-+-+-+-+-+-+
   1407 | |Q| | | | | | |
   1408 +-+-+-+-+-+-+-+-+
   1409 | | | | |Q| | | |
   1410 +-+-+-+-+-+-+-+-+
   1411 
   1412 >>> print count, "solutions in all."
   1413 92 solutions in all.
   1414 
   1415 And run a Knight's Tour on a 10x10 board.  Note that there are about
   1416 20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
   1417 
   1418 >>> k = Knights(10, 10)
   1419 >>> LIMIT = 2
   1420 >>> count = 0
   1421 >>> for x in k.solve():
   1422 ...     count += 1
   1423 ...     if count <= LIMIT:
   1424 ...         print "Solution", count
   1425 ...         k.printsolution(x)
   1426 ...     else:
   1427 ...         break
   1428 Solution 1
   1429 +---+---+---+---+---+---+---+---+---+---+
   1430 |  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
   1431 +---+---+---+---+---+---+---+---+---+---+
   1432 | 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
   1433 +---+---+---+---+---+---+---+---+---+---+
   1434 | 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
   1435 +---+---+---+---+---+---+---+---+---+---+
   1436 | 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
   1437 +---+---+---+---+---+---+---+---+---+---+
   1438 | 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
   1439 +---+---+---+---+---+---+---+---+---+---+
   1440 | 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
   1441 +---+---+---+---+---+---+---+---+---+---+
   1442 | 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
   1443 +---+---+---+---+---+---+---+---+---+---+
   1444 | 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
   1445 +---+---+---+---+---+---+---+---+---+---+
   1446 | 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
   1447 +---+---+---+---+---+---+---+---+---+---+
   1448 | 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
   1449 +---+---+---+---+---+---+---+---+---+---+
   1450 Solution 2
   1451 +---+---+---+---+---+---+---+---+---+---+
   1452 |  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
   1453 +---+---+---+---+---+---+---+---+---+---+
   1454 | 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
   1455 +---+---+---+---+---+---+---+---+---+---+
   1456 | 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
   1457 +---+---+---+---+---+---+---+---+---+---+
   1458 | 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
   1459 +---+---+---+---+---+---+---+---+---+---+
   1460 | 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
   1461 +---+---+---+---+---+---+---+---+---+---+
   1462 | 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
   1463 +---+---+---+---+---+---+---+---+---+---+
   1464 | 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
   1465 +---+---+---+---+---+---+---+---+---+---+
   1466 | 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
   1467 +---+---+---+---+---+---+---+---+---+---+
   1468 | 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
   1469 +---+---+---+---+---+---+---+---+---+---+
   1470 | 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
   1471 +---+---+---+---+---+---+---+---+---+---+
   1472 """
   1473 
   1474 weakref_tests = """\
   1475 Generators are weakly referencable:
   1476 
   1477 >>> import weakref
   1478 >>> def gen():
   1479 ...     yield 'foo!'
   1480 ...
   1481 >>> wr = weakref.ref(gen)
   1482 >>> wr() is gen
   1483 True
   1484 >>> p = weakref.proxy(gen)
   1485 
   1486 Generator-iterators are weakly referencable as well:
   1487 
   1488 >>> gi = gen()
   1489 >>> wr = weakref.ref(gi)
   1490 >>> wr() is gi
   1491 True
   1492 >>> p = weakref.proxy(gi)
   1493 >>> list(p)
   1494 ['foo!']
   1495 
   1496 """
   1497 
   1498 coroutine_tests = """\
   1499 Sending a value into a started generator:
   1500 
   1501 >>> def f():
   1502 ...     print (yield 1)
   1503 ...     yield 2
   1504 >>> g = f()
   1505 >>> g.next()
   1506 1
   1507 >>> g.send(42)
   1508 42
   1509 2
   1510 
   1511 Sending a value into a new generator produces a TypeError:
   1512 
   1513 >>> f().send("foo")
   1514 Traceback (most recent call last):
   1515 ...
   1516 TypeError: can't send non-None value to a just-started generator
   1517 
   1518 
   1519 Yield by itself yields None:
   1520 
   1521 >>> def f(): yield
   1522 >>> list(f())
   1523 [None]
   1524 
   1525 
   1526 
   1527 An obscene abuse of a yield expression within a generator expression:
   1528 
   1529 >>> list((yield 21) for i in range(4))
   1530 [21, None, 21, None, 21, None, 21, None]
   1531 
   1532 And a more sane, but still weird usage:
   1533 
   1534 >>> def f(): list(i for i in [(yield 26)])
   1535 >>> type(f())
   1536 <type 'generator'>
   1537 
   1538 
   1539 A yield expression with augmented assignment.
   1540 
   1541 >>> def coroutine(seq):
   1542 ...     count = 0
   1543 ...     while count < 200:
   1544 ...         count += yield
   1545 ...         seq.append(count)
   1546 >>> seq = []
   1547 >>> c = coroutine(seq)
   1548 >>> c.next()
   1549 >>> print seq
   1550 []
   1551 >>> c.send(10)
   1552 >>> print seq
   1553 [10]
   1554 >>> c.send(10)
   1555 >>> print seq
   1556 [10, 20]
   1557 >>> c.send(10)
   1558 >>> print seq
   1559 [10, 20, 30]
   1560 
   1561 
   1562 Check some syntax errors for yield expressions:
   1563 
   1564 >>> f=lambda: (yield 1),(yield 2)
   1565 Traceback (most recent call last):
   1566   ...
   1567   File "<doctest test.test_generators.__test__.coroutine[21]>", line 1
   1568 SyntaxError: 'yield' outside function
   1569 
   1570 >>> def f(): return lambda x=(yield): 1
   1571 Traceback (most recent call last):
   1572   ...
   1573 SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.coroutine[22]>, line 1)
   1574 
   1575 >>> def f(): x = yield = y
   1576 Traceback (most recent call last):
   1577   ...
   1578   File "<doctest test.test_generators.__test__.coroutine[23]>", line 1
   1579 SyntaxError: assignment to yield expression not possible
   1580 
   1581 >>> def f(): (yield bar) = y
   1582 Traceback (most recent call last):
   1583   ...
   1584   File "<doctest test.test_generators.__test__.coroutine[24]>", line 1
   1585 SyntaxError: can't assign to yield expression
   1586 
   1587 >>> def f(): (yield bar) += y
   1588 Traceback (most recent call last):
   1589   ...
   1590   File "<doctest test.test_generators.__test__.coroutine[25]>", line 1
   1591 SyntaxError: can't assign to yield expression
   1592 
   1593 
   1594 Now check some throw() conditions:
   1595 
   1596 >>> def f():
   1597 ...     while True:
   1598 ...         try:
   1599 ...             print (yield)
   1600 ...         except ValueError,v:
   1601 ...             print "caught ValueError (%s)" % (v),
   1602 >>> import sys
   1603 >>> g = f()
   1604 >>> g.next()
   1605 
   1606 >>> g.throw(ValueError) # type only
   1607 caught ValueError ()
   1608 
   1609 >>> g.throw(ValueError("xyz"))  # value only
   1610 caught ValueError (xyz)
   1611 
   1612 >>> g.throw(ValueError, ValueError(1))   # value+matching type
   1613 caught ValueError (1)
   1614 
   1615 >>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
   1616 caught ValueError (1)
   1617 
   1618 >>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
   1619 caught ValueError (1)
   1620 
   1621 >>> g.throw(ValueError(1), "foo")       # bad args
   1622 Traceback (most recent call last):
   1623   ...
   1624 TypeError: instance exception may not have a separate value
   1625 
   1626 >>> g.throw(ValueError, "foo", 23)      # bad args
   1627 Traceback (most recent call last):
   1628   ...
   1629 TypeError: throw() third argument must be a traceback object
   1630 
   1631 >>> def throw(g,exc):
   1632 ...     try:
   1633 ...         raise exc
   1634 ...     except:
   1635 ...         g.throw(*sys.exc_info())
   1636 >>> throw(g,ValueError) # do it with traceback included
   1637 caught ValueError ()
   1638 
   1639 >>> g.send(1)
   1640 1
   1641 
   1642 >>> throw(g,TypeError)  # terminate the generator
   1643 Traceback (most recent call last):
   1644   ...
   1645 TypeError
   1646 
   1647 >>> print g.gi_frame
   1648 None
   1649 
   1650 >>> g.send(2)
   1651 Traceback (most recent call last):
   1652   ...
   1653 StopIteration
   1654 
   1655 >>> g.throw(ValueError,6)       # throw on closed generator
   1656 Traceback (most recent call last):
   1657   ...
   1658 ValueError: 6
   1659 
   1660 >>> f().throw(ValueError,7)     # throw on just-opened generator
   1661 Traceback (most recent call last):
   1662   ...
   1663 ValueError: 7
   1664 
   1665 >>> f().throw("abc")     # throw on just-opened generator
   1666 Traceback (most recent call last):
   1667   ...
   1668 TypeError: exceptions must be classes, or instances, not str
   1669 
   1670 Now let's try closing a generator:
   1671 
   1672 >>> def f():
   1673 ...     try: yield
   1674 ...     except GeneratorExit:
   1675 ...         print "exiting"
   1676 
   1677 >>> g = f()
   1678 >>> g.next()
   1679 >>> g.close()
   1680 exiting
   1681 >>> g.close()  # should be no-op now
   1682 
   1683 >>> f().close()  # close on just-opened generator should be fine
   1684 
   1685 >>> def f(): yield      # an even simpler generator
   1686 >>> f().close()         # close before opening
   1687 >>> g = f()
   1688 >>> g.next()
   1689 >>> g.close()           # close normally
   1690 
   1691 And finalization:
   1692 
   1693 >>> def f():
   1694 ...     try: yield
   1695 ...     finally:
   1696 ...         print "exiting"
   1697 
   1698 >>> g = f()
   1699 >>> g.next()
   1700 >>> del g
   1701 exiting
   1702 
   1703 >>> class context(object):
   1704 ...    def __enter__(self): pass
   1705 ...    def __exit__(self, *args): print 'exiting'
   1706 >>> def f():
   1707 ...     with context():
   1708 ...          yield
   1709 >>> g = f()
   1710 >>> g.next()
   1711 >>> del g
   1712 exiting
   1713 
   1714 
   1715 GeneratorExit is not caught by except Exception:
   1716 
   1717 >>> def f():
   1718 ...     try: yield
   1719 ...     except Exception: print 'except'
   1720 ...     finally: print 'finally'
   1721 
   1722 >>> g = f()
   1723 >>> g.next()
   1724 >>> del g
   1725 finally
   1726 
   1727 
   1728 Now let's try some ill-behaved generators:
   1729 
   1730 >>> def f():
   1731 ...     try: yield
   1732 ...     except GeneratorExit:
   1733 ...         yield "foo!"
   1734 >>> g = f()
   1735 >>> g.next()
   1736 >>> g.close()
   1737 Traceback (most recent call last):
   1738   ...
   1739 RuntimeError: generator ignored GeneratorExit
   1740 >>> g.close()
   1741 
   1742 
   1743 Our ill-behaved code should be invoked during GC:
   1744 
   1745 >>> import sys, StringIO
   1746 >>> old, sys.stderr = sys.stderr, StringIO.StringIO()
   1747 >>> g = f()
   1748 >>> g.next()
   1749 >>> del g
   1750 >>> sys.stderr.getvalue().startswith(
   1751 ...     "Exception RuntimeError: 'generator ignored GeneratorExit' in "
   1752 ... )
   1753 True
   1754 >>> sys.stderr = old
   1755 
   1756 
   1757 And errors thrown during closing should propagate:
   1758 
   1759 >>> def f():
   1760 ...     try: yield
   1761 ...     except GeneratorExit:
   1762 ...         raise TypeError("fie!")
   1763 >>> g = f()
   1764 >>> g.next()
   1765 >>> g.close()
   1766 Traceback (most recent call last):
   1767   ...
   1768 TypeError: fie!
   1769 
   1770 
   1771 Ensure that various yield expression constructs make their
   1772 enclosing function a generator:
   1773 
   1774 >>> def f(): x += yield
   1775 >>> type(f())
   1776 <type 'generator'>
   1777 
   1778 >>> def f(): x = yield
   1779 >>> type(f())
   1780 <type 'generator'>
   1781 
   1782 >>> def f(): lambda x=(yield): 1
   1783 >>> type(f())
   1784 <type 'generator'>
   1785 
   1786 >>> def f(): x=(i for i in (yield) if (yield))
   1787 >>> type(f())
   1788 <type 'generator'>
   1789 
   1790 >>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
   1791 >>> data = [1,2]
   1792 >>> g = f(data)
   1793 >>> type(g)
   1794 <type 'generator'>
   1795 >>> g.send(None)
   1796 'a'
   1797 >>> data
   1798 [1, 2]
   1799 >>> g.send(0)
   1800 'b'
   1801 >>> data
   1802 [27, 2]
   1803 >>> try: g.send(1)
   1804 ... except StopIteration: pass
   1805 >>> data
   1806 [27, 27]
   1807 
   1808 """
   1809 
   1810 refleaks_tests = """
   1811 Prior to adding cycle-GC support to itertools.tee, this code would leak
   1812 references. We add it to the standard suite so the routine refleak-tests
   1813 would trigger if it starts being uncleanable again.
   1814 
   1815 >>> import itertools
   1816 >>> def leak():
   1817 ...     class gen:
   1818 ...         def __iter__(self):
   1819 ...             return self
   1820 ...         def next(self):
   1821 ...             return self.item
   1822 ...     g = gen()
   1823 ...     head, tail = itertools.tee(g)
   1824 ...     g.item = head
   1825 ...     return head
   1826 >>> it = leak()
   1827 
   1828 Make sure to also test the involvement of the tee-internal teedataobject,
   1829 which stores returned items.
   1830 
   1831 >>> item = it.next()
   1832 
   1833 
   1834 
   1835 This test leaked at one point due to generator finalization/destruction.
   1836 It was copied from Lib/test/leakers/test_generator_cycle.py before the file
   1837 was removed.
   1838 
   1839 >>> def leak():
   1840 ...    def gen():
   1841 ...        while True:
   1842 ...            yield g
   1843 ...    g = gen()
   1844 
   1845 >>> leak()
   1846 
   1847 
   1848 
   1849 This test isn't really generator related, but rather exception-in-cleanup
   1850 related. The coroutine tests (above) just happen to cause an exception in
   1851 the generator's __del__ (tp_del) method. We can also test for this
   1852 explicitly, without generators. We do have to redirect stderr to avoid
   1853 printing warnings and to doublecheck that we actually tested what we wanted
   1854 to test.
   1855 
   1856 >>> import sys, StringIO
   1857 >>> old = sys.stderr
   1858 >>> try:
   1859 ...     sys.stderr = StringIO.StringIO()
   1860 ...     class Leaker:
   1861 ...         def __del__(self):
   1862 ...             raise RuntimeError
   1863 ...
   1864 ...     l = Leaker()
   1865 ...     del l
   1866 ...     err = sys.stderr.getvalue().strip()
   1867 ...     err.startswith(
   1868 ...         "Exception RuntimeError: RuntimeError() in <"
   1869 ...     )
   1870 ...     err.endswith("> ignored")
   1871 ...     len(err.splitlines())
   1872 ... finally:
   1873 ...     sys.stderr = old
   1874 True
   1875 True
   1876 1
   1877 
   1878 
   1879 
   1880 These refleak tests should perhaps be in a testfile of their own,
   1881 test_generators just happened to be the test that drew these out.
   1882 
   1883 """
   1884 
# Doctest suites exposed to doctest/regrtest via the __test__ protocol;
# each value is one of the doctest-bearing strings defined in this module.
__test__ = {"tut":      tutorial_tests,
            "pep":      pep_tests,
            "email":    email_tests,
            "fun":      fun_tests,
            "syntax":   syntax_tests,
            "conjoin":  conjoin_tests,
            "weakref":  weakref_tests,
            "coroutine":  coroutine_tests,
            "refleaks": refleaks_tests,
            }
   1895 
   1896 # Magic test name that regrtest.py invokes *after* importing this module.

   1897 # This worms around a bootstrap problem.

   1898 # Note that doctest and regrtest both look in sys.argv for a "-v" argument,

   1899 # so this works as expected in both ways of running regrtest.

   1900 def test_main(verbose=None):
   1901     from test import test_support, test_generators
   1902     test_support.run_doctest(test_generators, verbose)
   1903 
   1904 # This part isn't needed for regrtest, but for running the test directly.

if __name__ == "__main__":
    test_main(1)    # force verbose output when run directly as a script
   1907