Home | History | Annotate | Download | only in include
      1 /* Inline functions for tree-flow.h
      2    Copyright (C) 2001, 2003, 2005, 2006, 2007, 2008 Free Software
      3    Foundation, Inc.
      4    Contributed by Diego Novillo <dnovillo (at) redhat.com>
      5 
      6 This file is part of GCC.
      7 
      8 GCC is free software; you can redistribute it and/or modify
      9 it under the terms of the GNU General Public License as published by
     10 the Free Software Foundation; either version 3, or (at your option)
     11 any later version.
     12 
     13 GCC is distributed in the hope that it will be useful,
     14 but WITHOUT ANY WARRANTY; without even the implied warranty of
     15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     16 GNU General Public License for more details.
     17 
     18 You should have received a copy of the GNU General Public License
     19 along with GCC; see the file COPYING3.  If not see
     20 <http://www.gnu.org/licenses/>.  */
     21 
     22 #ifndef _TREE_FLOW_INLINE_H
     23 #define _TREE_FLOW_INLINE_H 1
     24 
     25 /* Inline functions for manipulating various data structures defined in
     26    tree-flow.h.  See tree-flow.h for documentation.  */
     27 
     28 /* Return true when gimple SSA form was built.
     29    gimple_in_ssa_p is queried by gimplifier in various early stages before SSA
     30    infrastructure is initialized.  Check for presence of the datastructures
     31    at first place.  */
     32 static inline bool
     33 gimple_in_ssa_p (const struct function *fun)
     34 {
     35   return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
     36 }
     37 
     38 /* 'true' after aliases have been computed (see compute_may_aliases).  */
static inline bool
gimple_aliases_computed_p (const struct function *fun)
{
  /* Unlike gimple_in_ssa_p, callers must not query this before the
     gimple_df structure exists.  */
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->aliases_computed_p;
}
     45 
     46 /* Addressable variables in the function.  If bit I is set, then
     47    REFERENCED_VARS (I) has had its address taken.  Note that
     48    CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related.  An
     49    addressable variable is not necessarily call-clobbered (e.g., a
     50    local addressable whose address does not escape) and not all
     51    call-clobbered variables are addressable (e.g., a local static
     52    variable).  */
static inline bitmap
gimple_addressable_vars (const struct function *fun)
{
  /* Simple accessor; the bitmap lives in FUN's gimple_df.  */
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->addressable_vars;
}
     59 
     60 /* Call clobbered variables in the function.  If bit I is set, then
     61    REFERENCED_VARS (I) is call-clobbered.  */
static inline bitmap
gimple_call_clobbered_vars (const struct function *fun)
{
  /* Simple accessor; the bitmap lives in FUN's gimple_df.  */
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->call_clobbered_vars;
}
     68 
     69 /* Call-used variables in the function.  If bit I is set, then
     70    REFERENCED_VARS (I) is call-used at pure function call-sites.  */
static inline bitmap
gimple_call_used_vars (const struct function *fun)
{
  /* Simple accessor; the bitmap lives in FUN's gimple_df.  */
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->call_used_vars;
}
     77 
     78 /* Array of all variables referenced in the function.  */
     79 static inline htab_t
     80 gimple_referenced_vars (const struct function *fun)
     81 {
     82   if (!fun->gimple_df)
     83     return NULL;
     84   return fun->gimple_df->referenced_vars;
     85 }
     86 
     87 /* Artificial variable used to model the effects of function calls.  */
static inline tree
gimple_global_var (const struct function *fun)
{
  /* Simple accessor; the variable lives in FUN's gimple_df.  */
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->global_var;
}
     94 
     95 /* Artificial variable used to model the effects of nonlocal
     96    variables.  */
static inline tree
gimple_nonlocal_all (const struct function *fun)
{
  /* Simple accessor; the variable lives in FUN's gimple_df.  */
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->nonlocal_all;
}
    103 
    104 /* Initialize the hashtable iterator HTI to point to hashtable TABLE */
    105 
static inline void *
first_htab_element (htab_iterator *hti, htab_t table)
{
  hti->htab = table;
  hti->slot = table->entries;
  hti->limit = hti->slot + htab_size (table);
  /* Scan forward to the first live slot, skipping empty and deleted
     entries.  The do-while tests the current slot before advancing,
     so slot 0 is examined too.  */
  do
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
	break;
    } while (++(hti->slot) < hti->limit);

  /* If the loop ran off the end, every slot was empty/deleted.  */
  if (hti->slot < hti->limit)
    return *(hti->slot);
  return NULL;
}
    123 
/* Return true if the hashtable iterator HTI has reached the end of the
   table, and false otherwise.  */
    126 
    127 static inline bool
    128 end_htab_p (const htab_iterator *hti)
    129 {
    130   if (hti->slot >= hti->limit)
    131     return true;
    132   return false;
    133 }
    134 
    135 /* Advance the hashtable iterator pointed to by HTI to the next element of the
    136    hashtable.  */
    137 
    138 static inline void *
    139 next_htab_element (htab_iterator *hti)
    140 {
    141   while (++(hti->slot) < hti->limit)
    142     {
    143       PTR x = *(hti->slot);
    144       if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
    145 	return x;
    146     };
    147   return NULL;
    148 }
    149 
    150 /* Initialize ITER to point to the first referenced variable in the
    151    referenced_vars hashtable, and return that variable.  */
    152 
static inline tree
first_referenced_var (referenced_var_iterator *iter)
{
  /* NOTE(review): iterates the referenced vars of the current function
     (cfun), not of an arbitrary function passed in.  */
  return (tree) first_htab_element (&iter->hti,
				    gimple_referenced_vars (cfun));
}
    159 
    160 /* Return true if we have hit the end of the referenced variables ITER is
    161    iterating through.  */
    162 
static inline bool
end_referenced_vars_p (const referenced_var_iterator *iter)
{
  /* Thin wrapper over the underlying hashtable iterator.  */
  return end_htab_p (&iter->hti);
}
    168 
    169 /* Make ITER point to the next referenced_var in the referenced_var hashtable,
    170    and return that variable.  */
    171 
static inline tree
next_referenced_var (referenced_var_iterator *iter)
{
  /* Thin wrapper over the underlying hashtable iterator.  */
  return (tree) next_htab_element (&iter->hti);
}
    177 
    178 /* Fill up VEC with the variables in the referenced vars hashtable.  */
    179 
static inline void
fill_referenced_var_vec (VEC (tree, heap) **vec)
{
  referenced_var_iterator rvi;
  tree var;
  /* Start from an empty vector; VEC_safe_push allocates as needed.  */
  *vec = NULL;
  FOR_EACH_REFERENCED_VAR (var, rvi)
    VEC_safe_push (tree, heap, *vec, var);
}
    189 
    190 /* Return the variable annotation for T, which must be a _DECL node.
    191    Return NULL if the variable annotation doesn't already exist.  */
static inline var_ann_t
var_ann (const_tree t)
{
  var_ann_t ann;

  /* No annotation attached yet.  */
  if (!t->base.ann)
    return NULL;
  ann = (var_ann_t) t->base.ann;

  /* The generic annotation slot is shared; make sure what is stored
     there really is a variable annotation.  */
  gcc_assert (ann->common.type == VAR_ANN);

  return ann;
}
    205 
    206 /* Return the variable annotation for T, which must be a _DECL node.
    207    Create the variable annotation if it doesn't exist.  */
    208 static inline var_ann_t
    209 get_var_ann (tree var)
    210 {
    211   var_ann_t ann = var_ann (var);
    212   return (ann) ? ann : create_var_ann (var);
    213 }
    214 
    215 /* Return the function annotation for T, which must be a FUNCTION_DECL node.
    216    Return NULL if the function annotation doesn't already exist.  */
static inline function_ann_t
function_ann (const_tree t)
{
  gcc_assert (t);
  gcc_assert (TREE_CODE (t) == FUNCTION_DECL);
  /* If an annotation is present at all, it must be a function
     annotation, since the slot is shared among annotation kinds.  */
  gcc_assert (!t->base.ann
	      || t->base.ann->common.type == FUNCTION_ANN);

  /* May be NULL; callers wanting creation use get_function_ann.  */
  return (function_ann_t) t->base.ann;
}
    227 
    228 /* Return the function annotation for T, which must be a FUNCTION_DECL node.
    229    Create the function annotation if it doesn't exist.  */
    230 static inline function_ann_t
    231 get_function_ann (tree var)
    232 {
    233   function_ann_t ann = function_ann (var);
    234   gcc_assert (!var->base.ann || var->base.ann->common.type == FUNCTION_ANN);
    235   return (ann) ? ann : create_function_ann (var);
    236 }
    237 
    238 /* Get the number of the next statement uid to be allocated.  */
static inline unsigned int
gimple_stmt_max_uid (struct function *fn)
{
  /* Read-only accessor for FN's statement uid counter.  */
  return fn->last_stmt_uid;
}
    244 
    245 /* Set the number of the next statement uid to be allocated.  */
static inline void
set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
{
  /* Overwrite FN's statement uid counter with MAXID.  */
  fn->last_stmt_uid = maxid;
}
    251 
/* Return the current statement uid and bump the counter so the next
   allocation gets a fresh uid.  */
static inline unsigned int
inc_gimple_stmt_max_uid (struct function *fn)
{
  /* Post-increment: the value returned is the uid to use for the new
     statement; the stored counter then points at the next free uid.  */
  return fn->last_stmt_uid++;
}
    258 
    259 /* Return the annotation type for annotation ANN.  */
static inline enum tree_ann_type
ann_type (tree_ann_t ann)
{
  /* The type tag lives in the common header shared by all annotations.  */
  return ann->common.type;
}
    265 
    266 /* Return the may_aliases bitmap for variable VAR, or NULL if it has
    267    no may aliases.  */
static inline bitmap
may_aliases (const_tree var)
{
  /* VAR is expected to be a memory tag; MTAG_ALIASES reads its alias
     bitmap, which may be NULL.  */
  return MTAG_ALIASES (var);
}
    273 
    274 /* Return the line number for EXPR, or return -1 if we have no line
    275    number information for it.  */
    276 static inline int
    277 get_lineno (const_gimple stmt)
    278 {
    279   location_t loc;
    280 
    281   if (!stmt)
    282     return -1;
    283 
    284   loc = gimple_location (stmt);
    285   if (loc == UNKNOWN_LOCATION)
    286     return -1;
    287 
    288   return LOCATION_LINE (loc);
    289 }
    290 
    291 /* Delink an immediate_uses node from its chain.  */
static inline void
delink_imm_use (ssa_use_operand_t *linknode)
{
  /* Return if this node is not in a list.  A NULL prev pointer is the
     marker for an unlinked node (see link_imm_use).  */
  if (linknode->prev == NULL)
    return;

  /* Unhook from the circular doubly-linked list, then mark the node
     as unlinked by clearing both pointers.  */
  linknode->prev->next = linknode->next;
  linknode->next->prev = linknode->prev;
  linknode->prev = NULL;
  linknode->next = NULL;
}
    304 
    305 /* Link ssa_imm_use node LINKNODE into the chain for LIST.  */
static inline void
link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
{
  /* Link the new node at the head of the list.  If we are in the process of
     traversing the list, we won't visit any new nodes added to it.
     Note the order: LINKNODE's pointers are set before the list's own
     pointers are redirected at it.  */
  linknode->prev = list;
  linknode->next = list->next;
  list->next->prev = linknode;
  list->next = linknode;
}
    316 
    317 /* Link ssa_imm_use node LINKNODE into the chain for DEF.  */
static inline void
link_imm_use (ssa_use_operand_t *linknode, tree def)
{
  ssa_use_operand_t *root;

  /* Only SSA names carry an immediate-use list; for anything else the
     node is left unlinked (prev == NULL marks that state).  */
  if (!def || TREE_CODE (def) != SSA_NAME)
    linknode->prev = NULL;
  else
    {
      root = &(SSA_NAME_IMM_USE_NODE (def));
#ifdef ENABLE_CHECKING
      /* If the use slot is populated, it must point at DEF.  */
      if (linknode->use)
        gcc_assert (*(linknode->use) == def);
#endif
      link_imm_use_to_list (linknode, root);
    }
}
    335 
    336 /* Set the value of a use pointed to by USE to VAL.  */
static inline void
set_ssa_use_from_ptr (use_operand_p use, tree val)
{
  /* Unhook from the old value's list, store the new value, then hook
     into the new value's list (a no-op if VAL is not an SSA name).  */
  delink_imm_use (use);
  *(use->use) = val;
  link_imm_use (use, val);
}
    344 
    345 /* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
    346    in STMT.  */
    347 static inline void
    348 link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
    349 {
    350   if (stmt)
    351     link_imm_use (linknode, def);
    352   else
    353     link_imm_use (linknode, NULL);
    354   linknode->loc.stmt = stmt;
    355 }
    356 
    357 /* Relink a new node in place of an old node in the list.  */
static inline void
relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
{
  /* The node one had better be in the same list; both use slots must
     refer to the same value.  */
  gcc_assert (*(old->use) == *(node->use));
  node->prev = old->prev;
  node->next = old->next;
  /* Only splice NODE in if OLD was actually linked (prev != NULL).  */
  if (old->prev)
    {
      old->prev->next = node;
      old->next->prev = node;
      /* Remove the old node from the list.  */
      old->prev = NULL;
    }
}
    373 
    374 /* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
    375    in STMT.  */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
		     gimple stmt)
{
  /* With a statement, take OLD's place in its list; without one, leave
     LINKNODE unlinked.  */
  if (stmt)
    relink_imm_use (linknode, old);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}
    386 
    387 
/* Return true if IMM has reached the end of the immediate use list.  */
static inline bool
end_readonly_imm_use_p (const imm_use_iterator *imm)
{
  /* The list is circular; reaching the sentinel end node means done.  */
  return (imm->imm_use == imm->end_p);
}
    394 
    395 /* Initialize iterator IMM to process the list for VAR.  */
static inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm)
{
  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* The sentinel node embedded in the SSA name marks the list end.  */
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
#ifdef ENABLE_CHECKING
  /* Remember the next pointer so next_readonly_imm_use can detect
     modification of the list during traversal.  */
  imm->iter_node.next = imm->imm_use->next;
#endif
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
    410 
    411 /* Bump IMM to the next use in the list.  */
static inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

#ifdef ENABLE_CHECKING
  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  gcc_assert (imm->iter_node.next == old->next);
  imm->iter_node.next = old->next->next;
#endif

  /* Advance to the next node; the sentinel marks the end.  */
  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
    431 
    432 /* Return true if VAR has no uses.  */
static inline bool
has_zero_uses (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
  /* The list is circular; the sentinel pointing at itself means the
     use list is empty.  (The old comment here said "a single use",
     which described the wrong condition.)  */
  return (ptr == ptr->next);
}
    440 
    441 /* Return true if VAR has a single use.  */
static inline bool
has_single_use (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
  /* A single use means there is one item in the list: the list is
     non-empty and the sole element links straight back to the
     sentinel.  */
  return (ptr != ptr->next && ptr == ptr->next->next);
}
    449 
    450 
    451 /* If VAR has only a single immediate use, return true, and set USE_P and STMT
    452    to the use pointer and stmt of occurrence.  */
static inline bool
single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
  /* Same single-element test as has_single_use.  */
  if (ptr != ptr->next && ptr == ptr->next->next)
    {
      *use_p = ptr->next;
      *stmt = ptr->next->loc.stmt;
      return true;
    }
  /* Zero or multiple uses: clear the out parameters.  */
  *use_p = NULL_USE_OPERAND_P;
  *stmt = NULL;
  return false;
}
    467 
    468 /* Return the number of immediate uses of VAR.  */
    469 static inline unsigned int
    470 num_imm_uses (const_tree var)
    471 {
    472   const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
    473   const ssa_use_operand_t *ptr;
    474   unsigned int num = 0;
    475 
    476   for (ptr = start->next; ptr != start; ptr = ptr->next)
    477      num++;
    478 
    479   return num;
    480 }
    481 
    482 /* Return the tree pointed-to by USE.  */
static inline tree
get_use_from_ptr (use_operand_p use)
{
  /* A use operand holds a pointer to the tree slot it occupies.  */
  return *(use->use);
}
    488 
    489 /* Return the tree pointed-to by DEF.  */
static inline tree
get_def_from_ptr (def_operand_p def)
{
  /* A def operand is a direct pointer to the defined tree.  */
  return *def;
}
    495 
    496 /* Return a use_operand_p pointer for argument I of PHI node GS.  */
    497 
static inline use_operand_p
gimple_phi_arg_imm_use_ptr (gimple gs, int i)
{
  /* The immediate-use node is embedded in the phi argument record.  */
  return &gimple_phi_arg (gs, i)->imm_use;
}
    503 
    504 /* Return the tree operand for argument I of PHI node GS.  */
    505 
    506 static inline tree
    507 gimple_phi_arg_def (gimple gs, size_t index)
    508 {
    509   struct phi_arg_d *pd = gimple_phi_arg (gs, index);
    510   return get_use_from_ptr (&pd->imm_use);
    511 }
    512 
    513 /* Return a pointer to the tree operand for argument I of PHI node GS.  */
    514 
static inline tree *
gimple_phi_arg_def_ptr (gimple gs, size_t index)
{
  /* Address of the def slot inside the phi argument record.  */
  return &gimple_phi_arg (gs, index)->def;
}
    520 
    521 /* Return the edge associated with argument I of phi node GS.  */
    522 
static inline edge
gimple_phi_arg_edge (gimple gs, size_t i)
{
  /* Phi argument I corresponds to predecessor edge I of GS's block.  */
  return EDGE_PRED (gimple_bb (gs), i);
}
    528 
    529 /* Return the source location of gimple argument I of phi node GS.  */
    530 
static inline source_location
gimple_phi_arg_location (gimple gs, size_t i)
{
  /* Each phi argument carries its own source location.  */
  return gimple_phi_arg (gs, i)->locus;
}
    536 
    537 /* Return the source location of the argument on edge E of phi node GS.  */
    538 
static inline source_location
gimple_phi_arg_location_from_edge (gimple gs, edge e)
{
  /* E->dest_idx is the argument index for edge E's destination.  */
  return gimple_phi_arg (gs, e->dest_idx)->locus;
}
    544 
    545 /* Set the source location of gimple argument I of phi node GS to LOC.  */
    546 
static inline void
gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
{
  /* Store LOC directly into the argument's location slot.  */
  gimple_phi_arg (gs, i)->locus = loc;
}
    552 
    553 /* Return TRUE if argument I of phi node GS has a location record.  */
    554 
static inline bool
gimple_phi_arg_has_location (gimple gs, size_t i)
{
  /* UNKNOWN_LOCATION is the "no location recorded" sentinel.  */
  return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
}
    560 
    561 
    562 /* Return the PHI nodes for basic block BB, or NULL if there are no
    563    PHI nodes.  */
    564 static inline gimple_seq
    565 phi_nodes (const_basic_block bb)
    566 {
    567   gcc_assert (!(bb->flags & BB_RTL));
    568   if (!bb->il.gimple)
    569     return NULL;
    570   return bb->il.gimple->phi_nodes;
    571 }
    572 
    573 /* Set PHI nodes of a basic block BB to SEQ.  */
    574 
static inline void
set_phi_nodes (basic_block bb, gimple_seq seq)
{
  gimple_stmt_iterator i;

  /* Only valid on blocks still in gimple form.  */
  gcc_assert (!(bb->flags & BB_RTL));
  bb->il.gimple->phi_nodes = seq;
  /* Reparent every phi in SEQ to BB so gimple_bb stays consistent.  */
  if (seq)
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      gimple_set_bb (gsi_stmt (i), bb);
}
    586 
    587 /* Return the phi argument which contains the specified use.  */
    588 
static inline int
phi_arg_index_from_use (use_operand_p use)
{
  struct phi_arg_d *element, *root;
  size_t index;
  gimple phi;

  /* Since the use is the first thing in a PHI argument element, we can
     calculate its index based on casting it to an argument, and performing
     pointer arithmetic.  */

  phi = USE_STMT (use);
  gcc_assert (gimple_code (phi) == GIMPLE_PHI);

  /* Cast is valid only because imm_use is the first member of
     struct phi_arg_d (verified by the checking assert below).  */
  element = (struct phi_arg_d *)use;
  root = gimple_phi_arg (phi, 0);
  index = element - root;

#ifdef ENABLE_CHECKING
  /* Make sure the calculation doesn't have any leftover bytes.  If it does,
     then imm_use is likely not the first element in phi_arg_d.  */
  gcc_assert (
	  (((char *)element - (char *)root) % sizeof (struct phi_arg_d)) == 0);
  gcc_assert (index < gimple_phi_capacity (phi));
#endif

 return index;
}
    617 
    618 /* Mark VAR as used, so that it'll be preserved during rtl expansion.  */
    619 
static inline void
set_is_used (tree var)
{
  /* Creates the annotation on demand, then flags the variable used.  */
  var_ann_t ann = get_var_ann (var);
  ann->used = 1;
}
    626 
    627 
    628 /* Return true if T (assumed to be a DECL) is a global variable.  */
    629 
    630 static inline bool
    631 is_global_var (const_tree t)
    632 {
    633   if (MTAG_P (t))
    634     return MTAG_GLOBAL (t);
    635   else
    636     return (TREE_STATIC (t) || DECL_EXTERNAL (t));
    637 }
    638 
    639 /* PHI nodes should contain only ssa_names and invariants.  A test
    640    for ssa_name is definitely simpler; don't let invalid contents
    641    slip in in the meantime.  */
    642 
static inline bool
phi_ssa_name_p (const_tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return true;
#ifdef ENABLE_CHECKING
  /* Anything that is not an SSA name must be a gimple invariant;
     checking builds enforce this.  */
  gcc_assert (is_gimple_min_invariant (t));
#endif
  return false;
}
    653 
    654 
    655 /* Returns the loop of the statement STMT.  */
    656 
    657 static inline struct loop *
    658 loop_containing_stmt (gimple stmt)
    659 {
    660   basic_block bb = gimple_bb (stmt);
    661   if (!bb)
    662     return NULL;
    663 
    664   return bb->loop_father;
    665 }
    666 
    667 
    668 /* Return the memory partition tag associated with symbol SYM.  */
    669 
static inline tree
memory_partition (tree sym)
{
  tree tag;

  /* MPTs belong to their own partition.  */
  if (TREE_CODE (sym) == MEMORY_PARTITION_TAG)
    return sym;

  /* Only memory symbols (not gimple registers) can be partitioned.  */
  gcc_assert (!is_gimple_reg (sym));
  /* Autoparallelization moves statements from the original function (which has
     aliases computed) to the new one (which does not).  When rebuilding
     operands for the statement in the new function, we do not want to
     record the memory partition tags of the original function.  */
  if (!gimple_aliases_computed_p (cfun))
    return NULL_TREE;
  tag = get_var_ann (sym)->mpt;

#if defined ENABLE_CHECKING
  if (tag)
    gcc_assert (TREE_CODE (tag) == MEMORY_PARTITION_TAG);
#endif

  return tag;
}
    695 
/* Return true if NAME is a memory factoring SSA name (i.e., an SSA
   name for a memory partition).  */
    698 
static inline bool
factoring_name_p (const_tree name)
{
  /* True when NAME's underlying variable is a memory partition tag.  */
  return TREE_CODE (SSA_NAME_VAR (name)) == MEMORY_PARTITION_TAG;
}
    704 
    705 /* Return true if VAR is used by function calls.  */
static inline bool
is_call_used (const_tree var)
{
  /* Call-clobbered implies call-used; otherwise consult the bitmap of
     variables used at pure/const call sites.  */
  return (var_ann (var)->call_clobbered
	  || bitmap_bit_p (gimple_call_used_vars (cfun), DECL_UID (var)));
}
    712 
    713 /* Return true if VAR is clobbered by function calls.  */
static inline bool
is_call_clobbered (const_tree var)
{
  /* The flag is cached on the variable annotation.  */
  return var_ann (var)->call_clobbered;
}
    719 
    720 /* Mark variable VAR as being clobbered by function calls.  */
static inline void
mark_call_clobbered (tree var, unsigned int escape_type)
{
  /* Record why the variable escapes, flag it clobbered, and mirror the
     fact in cfun's call_clobbered_vars bitmap.  */
  var_ann (var)->escape_mask |= escape_type;
  var_ann (var)->call_clobbered = true;
  bitmap_set_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
}
    728 
    729 /* Clear the call-clobbered attribute from variable VAR.  */
    730 static inline void
    731 clear_call_clobbered (tree var)
    732 {
    733   var_ann_t ann = var_ann (var);
    734   ann->escape_mask = 0;
    735   if (MTAG_P (var))
    736     MTAG_GLOBAL (var) = 0;
    737   var_ann (var)->call_clobbered = false;
    738   bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
    739 }
    740 
    741 /* Return the common annotation for T.  Return NULL if the annotation
    742    doesn't already exist.  */
static inline tree_ann_common_t
tree_common_ann (const_tree t)
{
  /* Watch out static variables with unshared annotations.  */
  if (DECL_P (t) && TREE_CODE (t) == VAR_DECL)
    return &var_ann (t)->common;
  /* NOTE(review): dereferences t->base.ann unconditionally here — the
     "Return NULL" case applies only via the VAR_DECL path above.  */
  return &t->base.ann->common;
}
    751 
    752 /* Return a common annotation for T.  Create the constant annotation if it
    753    doesn't exist.  */
static inline tree_ann_common_t
get_tree_common_ann (tree t)
{
  /* Create the annotation lazily on first request.  */
  tree_ann_common_t ann = tree_common_ann (t);
  return (ann) ? ann : create_tree_common_ann (t);
}
    760 
    761 /*  -----------------------------------------------------------------------  */
    762 
    763 /* The following set of routines are used to iterator over various type of
    764    SSA operands.  */
    765 
    766 /* Return true if PTR is finished iterating.  */
static inline bool
op_iter_done (const ssa_op_iter *ptr)
{
  /* Set by the op_iter_next_* routines when all lists are drained.  */
  return ptr->done;
}
    772 
    773 /* Get the next iterator use value for PTR.  */
static inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
#ifdef ENABLE_CHECKING
  gcc_assert (ptr->iter_type == ssa_op_iter_use);
#endif
  /* Drain the operand lists in a fixed order: real uses, then vuses,
     then may-uses, then (for PHIs) the remaining phi arguments.  */
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  if (ptr->vuses)
    {
      use_p = VUSE_OP_PTR (ptr->vuses, ptr->vuse_index);
      /* Each vuse node holds a vector; advance within the vector first,
	 moving to the next node once it is exhausted.  */
      if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
        {
	  ptr->vuse_index = 0;
	  ptr->vuses = ptr->vuses->next;
	}
      return use_p;
    }
  if (ptr->mayuses)
    {
      use_p = VDEF_OP_PTR (ptr->mayuses, ptr->mayuse_index);
      /* Same vector-within-node advance as for vuses.  */
      if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
        {
	  ptr->mayuse_index = 0;
	  ptr->mayuses = ptr->mayuses->next;
	}
      return use_p;
    }
  if (ptr->phi_i < ptr->num_phi)
    {
      return PHI_ARG_DEF_PTR (ptr->phi_stmt, (ptr->phi_i)++);
    }
  /* All lists exhausted.  */
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}
    814 
    815 /* Get the next iterator def value for PTR.  */
static inline def_operand_p
op_iter_next_def (ssa_op_iter *ptr)
{
  def_operand_p def_p;
#ifdef ENABLE_CHECKING
  gcc_assert (ptr->iter_type == ssa_op_iter_def);
#endif
  /* Real defs first, then virtual defs.  */
  if (ptr->defs)
    {
      def_p = DEF_OP_PTR (ptr->defs);
      ptr->defs = ptr->defs->next;
      return def_p;
    }
  if (ptr->vdefs)
    {
      def_p = VDEF_RESULT_PTR (ptr->vdefs);
      ptr->vdefs = ptr->vdefs->next;
      return def_p;
    }
  /* Both lists exhausted.  */
  ptr->done = true;
  return NULL_DEF_OPERAND_P;
}
    838 
    839 /* Get the next iterator tree value for PTR.  */
static inline tree
op_iter_next_tree (ssa_op_iter *ptr)
{
  tree val;
#ifdef ENABLE_CHECKING
  gcc_assert (ptr->iter_type == ssa_op_iter_tree);
#endif
  /* Visit uses, vuses, may-uses, defs, then vdefs, returning each
     operand as a bare tree rather than an operand pointer.  */
  if (ptr->uses)
    {
      val = USE_OP (ptr->uses);
      ptr->uses = ptr->uses->next;
      return val;
    }
  if (ptr->vuses)
    {
      val = VUSE_OP (ptr->vuses, ptr->vuse_index);
      /* Advance within the node's vector before moving to the next node.  */
      if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
        {
	  ptr->vuse_index = 0;
	  ptr->vuses = ptr->vuses->next;
	}
      return val;
    }
  if (ptr->mayuses)
    {
      val = VDEF_OP (ptr->mayuses, ptr->mayuse_index);
      if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
        {
	  ptr->mayuse_index = 0;
	  ptr->mayuses = ptr->mayuses->next;
	}
      return val;
    }
  if (ptr->defs)
    {
      val = DEF_OP (ptr->defs);
      ptr->defs = ptr->defs->next;
      return val;
    }
  if (ptr->vdefs)
    {
      val = VDEF_RESULT (ptr->vdefs);
      ptr->vdefs = ptr->vdefs->next;
      return val;
    }

  /* Every operand list is exhausted.  */
  ptr->done = true;
  return NULL_TREE;

}
    890 
    891 
    892 /* This functions clears the iterator PTR, and marks it done.  This is normally
    893    used to prevent warnings in the compile about might be uninitialized
    894    components.  */
    895 
static inline void
clear_and_done_ssa_iter (ssa_op_iter *ptr)
{
  /* Null out every list pointer and index so nothing is visited, and
     mark the iterator done up front.  */
  ptr->defs = NULL;
  ptr->uses = NULL;
  ptr->vuses = NULL;
  ptr->vdefs = NULL;
  ptr->mayuses = NULL;
  ptr->iter_type = ssa_op_iter_none;
  ptr->phi_i = 0;
  ptr->num_phi = 0;
  ptr->phi_stmt = NULL;
  ptr->done = true;
  ptr->vuse_index = 0;
  ptr->mayuse_index = 0;
}
    912 
    913 /* Initialize the iterator PTR to the virtual defs in STMT.  */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
{
  /* Populate only the operand lists selected by FLAGS; unselected lists
     stay NULL and are skipped by the op_iter_next_* routines.  Note that
     both SSA_OP_VDEF and SSA_OP_VMAYUSE draw from the vdef operand list.  */
  ptr->defs = (flags & SSA_OP_DEF) ? gimple_def_ops (stmt) : NULL;
  ptr->uses = (flags & SSA_OP_USE) ? gimple_use_ops (stmt) : NULL;
  ptr->vuses = (flags & SSA_OP_VUSE) ? gimple_vuse_ops (stmt) : NULL;
  ptr->vdefs = (flags & SSA_OP_VDEF) ? gimple_vdef_ops (stmt) : NULL;
  ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? gimple_vdef_ops (stmt) : NULL;
  ptr->done = false;

  ptr->phi_i = 0;
  ptr->num_phi = 0;
  ptr->phi_stmt = NULL;
  ptr->vuse_index = 0;
  ptr->mayuse_index = 0;
}
    930 
    931 /* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
    932    the first use.  */
static inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
{
  /* A use iterator must not be asked for defs.  */
  gcc_assert ((flags & SSA_OP_ALL_DEFS) == 0);
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}
    941 
/* Initialize iterator PTR to the def operands in STMT based on FLAGS.  Return
   the first def.  FLAGS must not request any kind of use.  */
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
{
  /* A def iterator cannot deliver uses.  */
  gcc_assert ((flags & SSA_OP_ALL_USES) == 0);
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_def;
  return op_iter_next_def (ptr);
}
    952 
/* Initialize iterator PTR to the operands in STMT based on FLAGS.  Return
   the first operand as a tree.  Unlike the use/def variants, any mix of
   operand kinds may be requested.  */
static inline tree
op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
{
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_tree;
  return op_iter_next_tree (ptr);
}
    962 
/* Get the next VDEF operand for iterator PTR, returning its use vector in
   *USE and its result in *DEF.  When the list is exhausted, both outputs
   are cleared and PTR is marked done.  */
static inline void
op_iter_next_vdef (vuse_vec_p *use, def_operand_p *def,
			 ssa_op_iter *ptr)
{
#ifdef ENABLE_CHECKING
  gcc_assert (ptr->iter_type == ssa_op_iter_vdef);
#endif
  if (ptr->mayuses)
    {
      *def = VDEF_RESULT_PTR (ptr->mayuses);
      *use = VDEF_VECT (ptr->mayuses);
      ptr->mayuses = ptr->mayuses->next;
      return;
    }

  /* List exhausted; signal termination to the caller.  */
  *def = NULL_DEF_OPERAND_P;
  *use = NULL;
  ptr->done = true;
  return;
}
    985 
    986 
/* Get the next mustdef for iterator PTR, returning its single use in *USE
   and its def in *DEF.  A mustdef is a VDEF whose use vector contains
   exactly one element.  */
static inline void
op_iter_next_mustdef (use_operand_p *use, def_operand_p *def,
			 ssa_op_iter *ptr)
{
  vuse_vec_p vp;
  op_iter_next_vdef (&vp, def, ptr);
  if (vp != NULL)
    {
      /* Mustdefs have a single use by definition.  */
      gcc_assert (VUSE_VECT_NUM_ELEM (*vp) == 1);
      *use = VUSE_ELEMENT_PTR (*vp, 0);
    }
  else
    *use = NULL_USE_OPERAND_P;
}
   1001 
/* Initialize iterator PTR to the VDEF operands in STMT.  Return the first
   use vector in *USE and the first def in *DEF.  Not valid for PHI nodes.  */
static inline void
op_iter_init_vdef (ssa_op_iter *ptr, gimple stmt, vuse_vec_p *use,
		     def_operand_p *def)
{
  /* PHI defs are iterated with op_iter_init_phidef instead.  */
  gcc_assert (gimple_code (stmt) != GIMPLE_PHI);

  op_iter_init (ptr, stmt, SSA_OP_VMAYUSE);
  ptr->iter_type = ssa_op_iter_vdef;
  op_iter_next_vdef (use, def, ptr);
}
   1014 
   1015 
   1016 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   1017    return NULL.  */
   1018 static inline tree
   1019 single_ssa_tree_operand (gimple stmt, int flags)
   1020 {
   1021   tree var;
   1022   ssa_op_iter iter;
   1023 
   1024   var = op_iter_init_tree (&iter, stmt, flags);
   1025   if (op_iter_done (&iter))
   1026     return NULL_TREE;
   1027   op_iter_next_tree (&iter);
   1028   if (op_iter_done (&iter))
   1029     return var;
   1030   return NULL_TREE;
   1031 }
   1032 
   1033 
   1034 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   1035    return NULL.  */
   1036 static inline use_operand_p
   1037 single_ssa_use_operand (gimple stmt, int flags)
   1038 {
   1039   use_operand_p var;
   1040   ssa_op_iter iter;
   1041 
   1042   var = op_iter_init_use (&iter, stmt, flags);
   1043   if (op_iter_done (&iter))
   1044     return NULL_USE_OPERAND_P;
   1045   op_iter_next_use (&iter);
   1046   if (op_iter_done (&iter))
   1047     return var;
   1048   return NULL_USE_OPERAND_P;
   1049 }
   1050 
   1051 
   1052 
   1053 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   1054    return NULL.  */
   1055 static inline def_operand_p
   1056 single_ssa_def_operand (gimple stmt, int flags)
   1057 {
   1058   def_operand_p var;
   1059   ssa_op_iter iter;
   1060 
   1061   var = op_iter_init_def (&iter, stmt, flags);
   1062   if (op_iter_done (&iter))
   1063     return NULL_DEF_OPERAND_P;
   1064   op_iter_next_def (&iter);
   1065   if (op_iter_done (&iter))
   1066     return var;
   1067   return NULL_DEF_OPERAND_P;
   1068 }
   1069 
   1070 
/* Return true if there are zero operands in STMT matching the type
   given in FLAGS.  */
static inline bool
zero_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;

  /* The iterator is done immediately after init iff nothing matched.  */
  op_iter_init_tree (&iter, stmt, flags);
  return op_iter_done (&iter);
}
   1081 
   1082 
/* Return the number of operands matching FLAGS in STMT.  */
static inline int
num_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;
  tree t;
  int num = 0;

  /* There is no cached count; simply walk the operands and count them.  */
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
    num++;
  return num;
}
   1095 
   1096 
/* Delink all immediate_use information for STMT.  This is a no-op when
   the SSA operand infrastructure is not active.  */
static inline void
delink_stmt_imm_use (gimple stmt)
{
   ssa_op_iter iter;
   use_operand_p use_p;

   if (ssa_operands_active ())
     FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
       delink_imm_use (use_p);
}
   1108 
   1109 
/* This routine will compare all the operands matching FLAGS in STMT1 to those
   in STMT2.  TRUE is returned if they are the same.  STMTs can be NULL.  */
static inline bool
compare_ssa_operands_equal (gimple stmt1, gimple stmt2, int flags)
{
  ssa_op_iter iter1, iter2;
  tree op1 = NULL_TREE;
  tree op2 = NULL_TREE;
  bool look1, look2;

  /* Identical statements (including both NULL) trivially compare equal.  */
  if (stmt1 == stmt2)
    return true;

  look1 = stmt1 != NULL;
  look2 = stmt2 != NULL;

  if (look1)
    {
      op1 = op_iter_init_tree (&iter1, stmt1, flags);
      /* Only STMT1 exists: equal iff it has no matching operands.  */
      if (!look2)
        return op_iter_done (&iter1);
    }
  else
    clear_and_done_ssa_iter (&iter1);

  if (look2)
    {
      op2 = op_iter_init_tree (&iter2, stmt2, flags);
      /* Only STMT2 exists: equal iff it has no matching operands.  */
      if (!look1)
        return op_iter_done (&iter2);
    }
  else
    clear_and_done_ssa_iter (&iter2);

  /* Compare the two operand streams pairwise, in order.  */
  while (!op_iter_done (&iter1) && !op_iter_done (&iter2))
    {
      if (op1 != op2)
	return false;
      op1 = op_iter_next_tree (&iter1);
      op2 = op_iter_next_tree (&iter2);
    }

  /* Equal only if both streams were exhausted together.  */
  return (op_iter_done (&iter1) && op_iter_done (&iter2));
}
   1154 
   1155 
/* If the single DEF in the PHI node STMT matches FLAG, return it.
   Otherwise return NULL_TREE.  */
static inline tree
single_phi_def (gimple stmt, int flags)
{
  tree def = PHI_RESULT (stmt);
  /* The PHI result is real if it is a GIMPLE register, virtual otherwise;
     match it against the kind requested in FLAGS.  */
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
    return def;
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
    return def;
  return NULL_TREE;
}
   1168 
/* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS should
   be either SSA_OP_USES or SSA_OP_VIRTUAL_USES.  Return the first use, or
   NULL_USE_OPERAND_P if PHI has no uses of the requested kind.  */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = gimple_phi_result (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

  /* All arguments of a PHI are the same kind as its result: real if the
     result is a GIMPLE register, virtual otherwise.  */
  comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  ptr->phi_stmt = phi;
  ptr->num_phi = gimple_phi_num_args (phi);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}
   1196 
   1197 
   1198 /* Start an iterator for a PHI definition.  */
   1199 
   1200 static inline def_operand_p
   1201 op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
   1202 {
   1203   tree phi_def = PHI_RESULT (phi);
   1204   int comp;
   1205 
   1206   clear_and_done_ssa_iter (ptr);
   1207   ptr->done = false;
   1208 
   1209   gcc_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);
   1210 
   1211   comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);
   1212 
   1213   /* If the PHI node doesn't the operand type we care about, we're done.  */
   1214   if ((flags & comp) == 0)
   1215     {
   1216       ptr->done = true;
   1217       return NULL_USE_OPERAND_P;
   1218     }
   1219 
   1220   ptr->iter_type = ssa_op_iter_def;
   1221   /* The first call to op_iter_next_def will terminate the iterator since
   1222      all the fields are NULL.  Simply return the result here as the first and
   1223      therefore only result.  */
   1224   return PHI_RESULT_PTR (phi);
   1225 }
   1226 
/* Return true if IMM has reached the end of the immediate use stmt list.  */

static inline bool
end_imm_use_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}
   1234 
/* Finish the traversal of an immediate use stmt list IMM by removing the
   iterator's placeholder node from the list.  */

static inline void
end_imm_use_stmt_traverse (imm_use_iterator *imm)
{
  delink_imm_use (&(imm->iter_node));
}
   1243 
   1244 /* Immediate use traversal of uses within a stmt require that all the
   1245    uses on a stmt be sequentially listed.  This routine is used to build up
   1246    this sequential list by adding USE_P to the end of the current list
   1247    currently delimited by HEAD and LAST_P.  The new LAST_P value is
   1248    returned.  */
   1249 
   1250 static inline use_operand_p
   1251 move_use_after_head (use_operand_p use_p, use_operand_p head,
   1252 		      use_operand_p last_p)
   1253 {
   1254   gcc_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
   1255   /* Skip head when we find it.  */
   1256   if (use_p != head)
   1257     {
   1258       /* If use_p is already linked in after last_p, continue.  */
   1259       if (last_p->next == use_p)
   1260 	last_p = use_p;
   1261       else
   1262 	{
   1263 	  /* Delink from current location, and link in at last_p.  */
   1264 	  delink_imm_use (use_p);
   1265 	  link_imm_use_to_list (use_p, last_p);
   1266 	  last_p = use_p;
   1267 	}
   1268     }
   1269   return last_p;
   1270 }
   1271 
   1272 
/* This routine will relink all uses with the same stmt as HEAD into the list
   immediately following HEAD for iterator IMM.  */

static inline void
link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
  use_operand_p use_p;
  use_operand_p last_p = head;
  gimple head_stmt = USE_STMT (head);
  tree use = USE_FROM_PTR (head);
  ssa_op_iter op_iter;
  int flag;

  /* Only look at virtual or real uses, depending on the type of HEAD.  */
  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* Gather every use of USE in HEAD's statement directly after HEAD.  */
  if (gimple_code (head_stmt) == GIMPLE_PHI)
    {
      FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
	if (USE_FROM_PTR (use_p) == use)
	  last_p = move_use_after_head (use_p, head, last_p);
    }
  else
    {
      FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
	if (USE_FROM_PTR (use_p) == use)
	  last_p = move_use_after_head (use_p, head, last_p);
    }
  /* Link iter node in after last_p.  The marker delimits the end of this
     statement's uses; delink it first if it is already in a list.  */
  if (imm->iter_node.prev != NULL)
    delink_imm_use (&imm->iter_node);
  link_imm_use_to_list (&(imm->iter_node), last_p);
}
   1306 
/* Initialize IMM to traverse over uses of VAR.  Return the first statement.  */
static inline gimple
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
  gcc_assert (TREE_CODE (var) == SSA_NAME);

  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->next_imm_name = NULL_USE_OPERAND_P;

  /* iter_node is used as a marker within the immediate use list to indicate
     where the end of the current stmt's uses are.  Initialize it to NULL
     stmt and use, which indicates a marker node.  */
  imm->iter_node.prev = NULL_USE_OPERAND_P;
  imm->iter_node.next = NULL_USE_OPERAND_P;
  imm->iter_node.loc.stmt = NULL;
  imm->iter_node.use = NULL_USE_OPERAND_P;

  /* An empty use list means there are no use statements at all.  */
  if (end_imm_use_stmt_p (imm))
    return NULL;

  /* Group all uses belonging to the first statement together.  */
  link_use_stmts_after (imm->imm_use, imm);

  return USE_STMT (imm->imm_use);
}
   1332 
/* Bump IMM to the next stmt which has a use of var.  */

static inline gimple
next_imm_use_stmt (imm_use_iterator *imm)
{
  /* The iter_node marker delimits the end of the current stmt's uses.  */
  imm->imm_use = imm->iter_node.next;
  if (end_imm_use_stmt_p (imm))
    {
      /* Remove the marker from the list before finishing.  */
      if (imm->iter_node.prev != NULL)
	delink_imm_use (&imm->iter_node);
      return NULL;
    }

  /* Group all uses belonging to the next statement together.  */
  link_use_stmts_after (imm->imm_use, imm);
  return USE_STMT (imm->imm_use);
}
   1349 
/* Return the first use on the stmt IMM currently refers to.  */

static inline use_operand_p
first_imm_use_on_stmt (imm_use_iterator *imm)
{
  /* Remember the following use so the current one may be delinked safely.  */
  imm->next_imm_name = imm->imm_use->next;
  return imm->imm_use;
}
   1359 
/* Return TRUE if the last use on the stmt IMM refers to has been visited.  */

static inline bool
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
{
  /* The iter_node marker terminates the current stmt's uses.  */
  return (imm->imm_use == &(imm->iter_node));
}
   1367 
   1368 /* Bump to the next use on the stmt IMM refers to, return NULL if done.  */
   1369 
   1370 static inline use_operand_p
   1371 next_imm_use_on_stmt (imm_use_iterator *imm)
   1372 {
   1373   imm->imm_use = imm->next_imm_name;
   1374   if (end_imm_use_on_stmt_p (imm))
   1375     return NULL_USE_OPERAND_P;
   1376   else
   1377     {
   1378       imm->next_imm_name = imm->imm_use->next;
   1379       return imm->imm_use;
   1380     }
   1381 }
   1382 
   1383 /* Return true if VAR cannot be modified by the program.  */
   1384 
   1385 static inline bool
   1386 unmodifiable_var_p (const_tree var)
   1387 {
   1388   if (TREE_CODE (var) == SSA_NAME)
   1389     var = SSA_NAME_VAR (var);
   1390 
   1391   if (MTAG_P (var))
   1392     return false;
   1393 
   1394   return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
   1395 }
   1396 
   1397 /* Return true if REF, an ARRAY_REF, has an INDIRECT_REF somewhere in it.  */
   1398 
   1399 static inline bool
   1400 array_ref_contains_indirect_ref (const_tree ref)
   1401 {
   1402   gcc_assert (TREE_CODE (ref) == ARRAY_REF);
   1403 
   1404   do {
   1405     ref = TREE_OPERAND (ref, 0);
   1406   } while (handled_component_p (ref));
   1407 
   1408   return TREE_CODE (ref) == INDIRECT_REF;
   1409 }
   1410 
   1411 /* Return true if REF, a handled component reference, has an ARRAY_REF
   1412    somewhere in it.  */
   1413 
   1414 static inline bool
   1415 ref_contains_array_ref (const_tree ref)
   1416 {
   1417   gcc_assert (handled_component_p (ref));
   1418 
   1419   do {
   1420     if (TREE_CODE (ref) == ARRAY_REF)
   1421       return true;
   1422     ref = TREE_OPERAND (ref, 0);
   1423   } while (handled_component_p (ref));
   1424 
   1425   return false;
   1426 }
   1427 
   1428 /* Return true, if the two ranges [POS1, SIZE1] and [POS2, SIZE2]
   1429    overlap.  SIZE1 and/or SIZE2 can be (unsigned)-1 in which case the
   1430    range is open-ended.  Otherwise return false.  */
   1431 
   1432 static inline bool
   1433 ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
   1434 		  unsigned HOST_WIDE_INT size1,
   1435 		  unsigned HOST_WIDE_INT pos2,
   1436 		  unsigned HOST_WIDE_INT size2)
   1437 {
   1438   if (pos1 >= pos2
   1439       && (size2 == (unsigned HOST_WIDE_INT)-1
   1440 	  || pos1 < (pos2 + size2)))
   1441     return true;
   1442   if (pos2 >= pos1
   1443       && (size1 == (unsigned HOST_WIDE_INT)-1
   1444 	  || pos2 < (pos1 + size1)))
   1445     return true;
   1446 
   1447   return false;
   1448 }
   1449 
/* Return the memory tag associated with symbol SYM, or NULL_TREE if it
   has none.  */

static inline tree
symbol_mem_tag (tree sym)
{
  tree tag = get_var_ann (sym)->symbol_mem_tag;

#if defined ENABLE_CHECKING
  if (tag)
    gcc_assert (TREE_CODE (tag) == SYMBOL_MEMORY_TAG);
#endif

  return tag;
}
   1464 
   1465 
/* Set the memory tag associated with symbol SYM to TAG.  TAG, if not
   NULL, must be a SYMBOL_MEMORY_TAG.  */

static inline void
set_symbol_mem_tag (tree sym, tree tag)
{
#if defined ENABLE_CHECKING
  if (tag)
    gcc_assert (TREE_CODE (tag) == SYMBOL_MEMORY_TAG);
#endif

  get_var_ann (sym)->symbol_mem_tag = tag;
}
   1478 
/* Accessor to tree-ssa-operands.c caches.  Return the SSA operand cache
   for function FUN.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}
   1485 
/* Return the memory reference statistics for function FN.  */
static inline struct mem_ref_stats_d *
gimple_mem_ref_stats (const struct function *fn)
{
  return &fn->gimple_df->mem_ref_stats;
}
   1492 
/* Given an edge_var_map V, return the PHI arg definition.  */

static inline tree
redirect_edge_var_map_def (edge_var_map *v)
{
  return v->def;
}
   1500 
/* Given an edge_var_map V, return the PHI result.  */

static inline tree
redirect_edge_var_map_result (edge_var_map *v)
{
  return v->result;
}
   1508 
/* Given an edge_var_map V, return the PHI arg source location.  */

static inline source_location
redirect_edge_var_map_location (edge_var_map *v)
{
  return v->locus;
}
   1516 
   1517 
/* Return an SSA_NAME node for variable VAR defined in statement STMT
   in function cfun.  Convenience wrapper around make_ssa_name_fn.  */

static inline tree
make_ssa_name (tree var, gimple stmt)
{
  return make_ssa_name_fn (cfun, var, stmt);
}
   1526 
   1527 #endif /* _TREE_FLOW_INLINE_H  */
   1528