/* Peephole optimizations for bytecode compiler. */

#include "Python.h"

#include "Python-ast.h"
#include "node.h"
#include "pyarena.h"
#include "ast.h"
#include "code.h"
#include "compile.h"
#include "symtable.h"
#include "opcode.h"

#define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
#define UNCONDITIONAL_JUMP(op)  (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
#define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \
    || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
#define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
#define SETARG(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255
#define CODESIZE(op)  (HAS_ARG(op) ? 3 : 1)
#define ISBASICBLOCK(blocks, start, bytes) \
    (blocks[start]==blocks[start+bytes-1])
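
/* Worked example of the encoding (byte values illustrative): for the
   three bytes {LOAD_CONST, 0x05, 0x01} at offset i, GETARG(arr, i)
   is (0x01<<8) + 0x05 == 261, the argument being stored low byte
   first; CODESIZE(LOAD_CONST) is 3, so the next instruction begins
   at i+3; SETARG performs the inverse, splitting val back into the
   two argument bytes. */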

/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n
   with    LOAD_CONST (c1, c2, ... cn).
   The consts table must still be in list form so that the
   new constant (c1, c2, ... cn) can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Bails out with no change if one or more of the LOAD_CONSTs is missing.
   Also works for BUILD_LIST when followed by an "in" or "not in" test.
*/
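/* Example (const indices are illustrative): the source "(1, 2)"
   compiles to
       LOAD_CONST 0 (1); LOAD_CONST 1 (2); BUILD_TUPLE 2
   (nine bytes) and is rewritten in place to six NOPs followed by
   LOAD_CONST k, where consts[k] is the new tuple (1, 2). */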
static int
tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts)
{
    PyObject *newconst, *constant;
    Py_ssize_t i, arg, len_consts;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
    assert(GETARG(codestr, (n*3)) == n);
    for (i=0 ; i<n ; i++)
        assert(codestr[i*3] == LOAD_CONST);

    /* Build up the new tuple of constants */
    newconst = PyTuple_New(n);
    if (newconst == NULL)
        return 0;
    len_consts = PyList_GET_SIZE(consts);
    for (i=0 ; i<n ; i++) {
        arg = GETARG(codestr, (i*3));
        assert(arg < len_consts);
        constant = PyList_GET_ITEM(consts, arg);
        Py_INCREF(constant);
        PyTuple_SET_ITEM(newconst, i, constant);
    }

    /* Append folded constant onto consts */
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOPs over the old LOAD_CONSTs and
       add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
    memset(codestr, NOP, n*3);
    codestr[n*3] = LOAD_CONST;
    SETARG(codestr, (n*3), len_consts);
    return 1;
}

/* Replace LOAD_CONST c1, LOAD_CONST c2, BINOP
   with    LOAD_CONST binop(c1,c2).
   The consts table must still be in list form so that the
   new constant can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Abandons the transformation if the folding fails (e.g. 1+'a').
   If the new constant is a sequence, only folds when the size
   is below a threshold value.  That keeps pyc files from
   becoming large in the presence of code like:  (None,)*1000.
*/
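/* Example (const indices are illustrative): "2 * 3" compiles to
       LOAD_CONST 0 (2); LOAD_CONST 1 (3); BINARY_MULTIPLY
   (seven bytes) and is rewritten to four NOPs followed by
   LOAD_CONST k, where consts[k] holds the folded value 6. */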
static int
fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst, *v, *w;
    Py_ssize_t len_consts, size;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);
    assert(codestr[3] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
    opcode = codestr[6];
    switch (opcode) {
        case BINARY_POWER:
            newconst = PyNumber_Power(v, w, Py_None);
            break;
        case BINARY_MULTIPLY:
            newconst = PyNumber_Multiply(v, w);
            break;
        case BINARY_DIVIDE:
            /* Cannot fold this operation statically since
               the result can depend on the run-time presence
               of the -Qnew flag */
            return 0;
        case BINARY_TRUE_DIVIDE:
            newconst = PyNumber_TrueDivide(v, w);
            break;
        case BINARY_FLOOR_DIVIDE:
            newconst = PyNumber_FloorDivide(v, w);
            break;
        case BINARY_MODULO:
            newconst = PyNumber_Remainder(v, w);
            break;
        case BINARY_ADD:
            newconst = PyNumber_Add(v, w);
            break;
        case BINARY_SUBTRACT:
            newconst = PyNumber_Subtract(v, w);
            break;
        case BINARY_SUBSCR:
            newconst = PyObject_GetItem(v, w);
            /* #5057: if v is unicode, there might be differences between
               wide and narrow builds in cases like u'\U00012345'[0].
               Wide builds will return a non-BMP char, whereas narrow builds
               will return a surrogate.  In both cases skip the
               optimization in order to produce compatible pycs.
             */
            if (newconst != NULL &&
                PyUnicode_Check(v) && PyUnicode_Check(newconst)) {
                Py_UNICODE ch = PyUnicode_AS_UNICODE(newconst)[0];
#ifdef Py_UNICODE_WIDE
                if (ch > 0xFFFF) {
#else
                if (ch >= 0xD800 && ch <= 0xDFFF) {
#endif
                    Py_DECREF(newconst);
                    return 0;
                }
            }
            break;
        case BINARY_LSHIFT:
            newconst = PyNumber_Lshift(v, w);
            break;
        case BINARY_RSHIFT:
            newconst = PyNumber_Rshift(v, w);
            break;
        case BINARY_AND:
            newconst = PyNumber_And(v, w);
            break;
        case BINARY_XOR:
            newconst = PyNumber_Xor(v, w);
            break;
        case BINARY_OR:
            newconst = PyNumber_Or(v, w);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                 "unexpected binary operation %d on a constant",
                     opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }
    size = PyObject_Size(newconst);
    if (size == -1)
        PyErr_Clear();
    else if (size > 20) {
        Py_DECREF(newconst);
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP NOP NOP NOP LOAD_CONST newconst */
    memset(codestr, NOP, 4);
    codestr[4] = LOAD_CONST;
    SETARG(codestr, 4, len_consts);
    return 1;
}

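/* Replace LOAD_CONST c UNARY_OP
   with    LOAD_CONST unary_op(c).
   The consts table must still be in list form so that the
   new constant can be appended.
   Called with codestr pointing to the LOAD_CONST.
   Abandons the transformation if the folding fails; negation
   is only folded for true (nonzero) operands so that -0.0
   keeps its sign. */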
static int
fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst=NULL, *v;
    Py_ssize_t len_consts;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    opcode = codestr[3];
    switch (opcode) {
        case UNARY_NEGATIVE:
            /* Preserve the sign of -0.0: only fold negation
               when the operand is true (nonzero) */
            if (PyObject_IsTrue(v) == 1)
                newconst = PyNumber_Negative(v);
            break;
        case UNARY_CONVERT:
            newconst = PyObject_Repr(v);
            break;
        case UNARY_INVERT:
            newconst = PyNumber_Invert(v);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                 "unexpected unary operation %d on a constant",
                     opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP LOAD_CONST newconst */
    codestr[0] = NOP;
    codestr[1] = LOAD_CONST;
    SETARG(codestr, 1, len_consts);
    return 1;
}

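/* Scan the code for jump targets and number the basic blocks:
   on return, blocks[i] holds the block number of the byte at
   offset i, so offsets a and b lie in the same basic block iff
   blocks[a] == blocks[b], which is exactly what ISBASICBLOCK
   tests. */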
static unsigned int *
markblocks(unsigned char *code, Py_ssize_t len)
{
    unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int));
    int i, j, opcode, blockcnt = 0;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(blocks, 0, len*sizeof(int));

    /* Mark labels in the first pass */
    for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
        opcode = code[i];
        switch (opcode) {
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                j = GETJUMPTGT(code, i);
                blocks[j] = 1;
                break;
        }
    }
    /* Build block numbers in the second pass */
    for (i=0 ; i<len ; i++) {
        blockcnt += blocks[i];          /* increment blockcnt over labels */
        blocks[i] = blockcnt;
    }
    return blocks;
}

/* Perform basic peephole optimizations to components of a code object.
   The consts object should still be in list form to allow new constants
   to be appended.

   To keep the optimizer simple, it bails out (does nothing) for code
   containing extended arguments or that has a length over 32,700.  That
   allows us to avoid overflow and sign issues.  Likewise, it bails when
   the lineno table has complex encoding for gaps >= 255.

   Optimizations are restricted to simple transformations occurring within
   a single basic block.  All transformations keep the code size the same
   or smaller.  For those that reduce size, the gaps are initially filled
   with NOPs.  Later those NOPs are removed and the jump addresses
   retargeted in a single pass.  Line numbering is adjusted accordingly. */

PyObject *
PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
                PyObject *lineno_obj)
{
    Py_ssize_t i, j, codelen;
    int nops, h, adj;
    int tgt, tgttgt, opcode;
    unsigned char *codestr = NULL;
    unsigned char *lineno;
    int *addrmap = NULL;
    int new_line, cum_orig_line, last_line, tabsiz;
    int cumlc=0, lastlc=0;      /* Count runs of consecutive LOAD_CONSTs */
    unsigned int *blocks = NULL;
    char *name;

    /* Bail out if an exception is set */
    if (PyErr_Occurred())
        goto exitError;

    /* Bypass optimization when the lineno table is too complex */
    assert(PyString_Check(lineno_obj));
    lineno = (unsigned char*)PyString_AS_STRING(lineno_obj);
    tabsiz = PyString_GET_SIZE(lineno_obj);
    if (memchr(lineno, 255, tabsiz) != NULL)
        goto exitUnchanged;

    /* Avoid situations where jump retargeting could overflow */
    assert(PyString_Check(code));
    codelen = PyString_GET_SIZE(code);
    if (codelen > 32700)
        goto exitUnchanged;

    /* Make a modifiable copy of the code string */
    codestr = (unsigned char *)PyMem_Malloc(codelen);
    if (codestr == NULL)
        goto exitError;
    codestr = (unsigned char *)memcpy(codestr,
                                      PyString_AS_STRING(code), codelen);

    /* Verify that RETURN_VALUE terminates the codestring.  This allows
       the various transformation patterns to look ahead several
       instructions without additional checks to make sure they are not
       looking beyond the end of the code string.
    */
    if (codestr[codelen-1] != RETURN_VALUE)
        goto exitUnchanged;

    /* Mapping to new jump targets after NOPs are removed */
    addrmap = (int *)PyMem_Malloc(codelen * sizeof(int));
    if (addrmap == NULL)
        goto exitError;

    blocks = markblocks(codestr, codelen);
    if (blocks == NULL)
        goto exitError;
    assert(PyList_Check(consts));

    for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
      reoptimize_current:
        opcode = codestr[i];

        lastlc = cumlc;
        cumlc = 0;

        switch (opcode) {
            /* Replace UNARY_NOT POP_JUMP_IF_FALSE
               with    POP_JUMP_IF_TRUE */
            case UNARY_NOT:
                if (codestr[i+1] != POP_JUMP_IF_FALSE
                    || !ISBASICBLOCK(blocks,i,4))
                    continue;
                j = GETARG(codestr, i+1);
                codestr[i] = POP_JUMP_IF_TRUE;
                SETARG(codestr, i, j);
                codestr[i+3] = NOP;
                goto reoptimize_current;

                /* not a is b -->  a is not b
                   not a in b -->  a not in b
                   not a is not b -->  a is b
                   not a not in b -->  a in b
                */
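                /* Why oparg^1 works (a note, relying on the 2.x cmp_op
                   table): indices 6..9 are "in", "not in", "is",
                   "is not", so each test and its negation differ only
                   in the low bit, and XOR with 1 swaps them. */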
            case COMPARE_OP:
                j = GETARG(codestr, i);
                if (j < 6  ||  j > 9  ||
                    codestr[i+3] != UNARY_NOT  ||
                    !ISBASICBLOCK(blocks,i,4))
                    continue;
                SETARG(codestr, i, (j^1));
                codestr[i+3] = NOP;
                break;

                /* Replace LOAD_GLOBAL/LOAD_NAME None
                   with LOAD_CONST None */
            case LOAD_NAME:
            case LOAD_GLOBAL:
                j = GETARG(codestr, i);
                name = PyString_AsString(PyTuple_GET_ITEM(names, j));
                if (name == NULL  ||  strcmp(name, "None") != 0)
                    continue;
                for (j=0 ; j < PyList_GET_SIZE(consts) ; j++) {
                    if (PyList_GET_ITEM(consts, j) == Py_None)
                        break;
                }
                if (j == PyList_GET_SIZE(consts)) {
                    if (PyList_Append(consts, Py_None) == -1)
                        goto exitError;
                }
                assert(PyList_GET_ITEM(consts, j) == Py_None);
                codestr[i] = LOAD_CONST;
                SETARG(codestr, i, j);
                cumlc = lastlc + 1;
                break;

                /* Skip over LOAD_CONST trueconst
                   POP_JUMP_IF_FALSE xx.  This improves
                   "while 1" performance. */
            case LOAD_CONST:
                cumlc = lastlc + 1;
                j = GETARG(codestr, i);
                if (codestr[i+3] != POP_JUMP_IF_FALSE  ||
                    !ISBASICBLOCK(blocks,i,6)  ||
                    !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
                    continue;
                memset(codestr+i, NOP, 6);
                cumlc = 0;
                break;

                /* Try to fold tuples of constants (includes a case for lists
                   which are only used for "in" and "not in" tests).
                   Skip over BUILD_SEQN 1 UNPACK_SEQN 1.
                   Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
                   Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
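                /* E.g. the swap "a, b = b, a" compiles (before this
                   pass) to
                       LOAD_NAME b; LOAD_NAME a;
                       BUILD_TUPLE 2; UNPACK_SEQUENCE 2;
                       STORE_NAME a; STORE_NAME b
                   and the BUILD/UNPACK pair collapses to a single
                   ROT_TWO. */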
            case BUILD_TUPLE:
            case BUILD_LIST:
                j = GETARG(codestr, i);
                h = i - 3 * j;
                if (h >= 0  &&
                    j <= lastlc  &&
                    ((opcode == BUILD_TUPLE &&
                      ISBASICBLOCK(blocks, h, 3*(j+1))) ||
                     (opcode == BUILD_LIST &&
                      codestr[i+3]==COMPARE_OP &&
                      ISBASICBLOCK(blocks, h, 3*(j+2)) &&
                      (GETARG(codestr,i+3)==6 ||
                       GETARG(codestr,i+3)==7))) &&
                    tuple_of_constants(&codestr[h], j, consts)) {
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                    break;
                }
                if (codestr[i+3] != UNPACK_SEQUENCE  ||
                    !ISBASICBLOCK(blocks,i,6) ||
                    j != GETARG(codestr, i+3))
                    continue;
                if (j == 1) {
                    memset(codestr+i, NOP, 6);
                } else if (j == 2) {
                    codestr[i] = ROT_TWO;
                    memset(codestr+i+1, NOP, 5);
                } else if (j == 3) {
                    codestr[i] = ROT_THREE;
                    codestr[i+1] = ROT_TWO;
                    memset(codestr+i+2, NOP, 4);
                }
                break;

                /* Fold binary ops on constants.
                   LOAD_CONST c1 LOAD_CONST c2 BINOP -->  LOAD_CONST binop(c1,c2) */
            case BINARY_POWER:
            case BINARY_MULTIPLY:
            case BINARY_TRUE_DIVIDE:
            case BINARY_FLOOR_DIVIDE:
            case BINARY_MODULO:
            case BINARY_ADD:
            case BINARY_SUBTRACT:
            case BINARY_SUBSCR:
            case BINARY_LSHIFT:
            case BINARY_RSHIFT:
            case BINARY_AND:
            case BINARY_XOR:
            case BINARY_OR:
                if (lastlc >= 2  &&
                    ISBASICBLOCK(blocks, i-6, 7)  &&
                    fold_binops_on_constants(&codestr[i-6], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

                /* Fold unary ops on constants.
                   LOAD_CONST c UNARY_OP -->  LOAD_CONST unary_op(c) */
            case UNARY_NEGATIVE:
            case UNARY_CONVERT:
            case UNARY_INVERT:
                if (lastlc >= 1  &&
                    ISBASICBLOCK(blocks, i-3, 4)  &&
                    fold_unaryops_on_constants(&codestr[i-3], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

                /* Simplify conditional jump to conditional jump where the
                   result of the first test implies the success of a similar
                   test or the failure of the opposite test.
                   Arises in code like:
                   "if a and b:"
                   "if a or b:"
                   "a and b or c"
                   "(a and b) and c"
                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
                      -->  x:JUMP_IF_FALSE_OR_POP z
                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
                      -->  x:POP_JUMP_IF_FALSE y+3
                   where y+3 is the instruction following the second test.
                */
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                tgt = GETJUMPTGT(codestr, i);
                j = codestr[tgt];
                if (CONDITIONAL_JUMP(j)) {
                    /* NOTE: all possible jumps here are absolute! */
                    if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) {
                        /* The second jump will be taken iff the first is. */
                        tgttgt = GETJUMPTGT(codestr, tgt);
                        /* The current opcode inherits its target's
                           stack behaviour */
                        codestr[i] = j;
                        SETARG(codestr, i, tgttgt);
                        goto reoptimize_current;
                    } else {
                        /* The second jump is not taken if the first is (so
                           jump past it), and all conditional jumps pop their
                           argument when they're not taken (so change the
                           first jump to pop its argument when it's taken). */
                        if (JUMPS_ON_TRUE(opcode))
                            codestr[i] = POP_JUMP_IF_TRUE;
                        else
                            codestr[i] = POP_JUMP_IF_FALSE;
                        SETARG(codestr, i, (tgt + 3));
                        goto reoptimize_current;
                    }
                }
                /* Intentional fallthrough */

                /* Replace jumps to unconditional jumps */
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                tgt = GETJUMPTGT(codestr, i);
                /* Replace a JUMP_* to a RETURN with just the RETURN */
                if (UNCONDITIONAL_JUMP(opcode) &&
                    codestr[tgt] == RETURN_VALUE) {
                    codestr[i] = RETURN_VALUE;
                    memset(codestr+i+1, NOP, 2);
                    continue;
                }
                if (!UNCONDITIONAL_JUMP(codestr[tgt]))
                    continue;
                tgttgt = GETJUMPTGT(codestr, tgt);
                if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */
                    opcode = JUMP_ABSOLUTE;
                if (!ABSOLUTE_JUMP(opcode))
                    tgttgt -= i + 3;        /* Calc relative jump addr */
                if (tgttgt < 0)             /* No backward relative jumps */
                    continue;
                codestr[i] = opcode;
                SETARG(codestr, i, tgttgt);
                break;

            case EXTENDED_ARG:
                goto exitUnchanged;

                /* Replace RETURN LOAD_CONST None RETURN with just RETURN */
                /* Remove unreachable JUMPs after RETURN */
            case RETURN_VALUE:
                if (i+4 >= codelen)
                    continue;
                if (codestr[i+4] == RETURN_VALUE &&
                    ISBASICBLOCK(blocks,i,5))
                    memset(codestr+i+1, NOP, 4);
                else if (UNCONDITIONAL_JUMP(codestr[i+1]) &&
                         ISBASICBLOCK(blocks,i,4))
                    memset(codestr+i+1, NOP, 3);
                break;
        }
    }

    /* Fixup linenotab */
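    /* (A note on the format: co_lnotab is a string of (address-delta,
       line-delta) byte pairs.  Only the address deltas, the
       even-indexed bytes, need remapping once the NOPs are squeezed
       out; the line deltas are left alone.) */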
    for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
        addrmap[i] = i - nops;
        if (codestr[i] == NOP)
            nops++;
    }
    cum_orig_line = 0;
    last_line = 0;
    for (i=0 ; i < tabsiz ; i+=2) {
        cum_orig_line += lineno[i];
        new_line = addrmap[cum_orig_line];
        assert (new_line - last_line < 255);
        lineno[i] = (unsigned char)(new_line - last_line);
        last_line = new_line;
    }

    /* Remove NOPs and fixup jump targets */
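    /* Absolute jumps just remap their target through addrmap; relative
       jumps remap the absolute target and then subtract the
       instruction's own new end address, addrmap[i] + 3. */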
    for (i=0, h=0 ; i<codelen ; ) {
        opcode = codestr[i];
        switch (opcode) {
            case NOP:
                i++;
                continue;

            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                j = addrmap[GETARG(codestr, i)];
                SETARG(codestr, i, j);
                break;

            case FOR_ITER:
            case JUMP_FORWARD:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
                SETARG(codestr, i, j);
                break;
        }
        adj = CODESIZE(opcode);
        while (adj--)
            codestr[h++] = codestr[i++];
    }
    assert(h + nops == codelen);

    code = PyString_FromStringAndSize((char *)codestr, h);
    PyMem_Free(addrmap);
    PyMem_Free(codestr);
    PyMem_Free(blocks);
    return code;

 exitError:
    code = NULL;

 exitUnchanged:
    if (blocks != NULL)
        PyMem_Free(blocks);
    if (addrmap != NULL)
        PyMem_Free(addrmap);
    if (codestr != NULL)
        PyMem_Free(codestr);
    Py_XINCREF(code);
    return code;
}
    681