/* Natural loop functions
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_CFGLOOP_H
#define GCC_CFGLOOP_H

#include "basic-block.h"
#include "double-int.h"

#include "bitmap.h"
#include "sbitmap.h"

/* Structure to hold decision about unrolling/peeling.  */
enum lpt_dec
{
  LPT_NONE,
  LPT_PEEL_COMPLETELY,
  LPT_PEEL_SIMPLE,
  LPT_UNROLL_CONSTANT,
  LPT_UNROLL_RUNTIME,
  LPT_UNROLL_STUPID
};

struct GTY (()) lpt_decision {
  enum lpt_dec decision;
  unsigned times;
};

/* The type of extend applied to an IV.  */
enum iv_extend_code
{
  IV_SIGN_EXTEND,
  IV_ZERO_EXTEND,
  IV_UNKNOWN_EXTEND
};

/* The structure describing a bound on number of iterations of a loop.  */

struct GTY ((chain_next ("%h.next"))) nb_iter_bound {
  /* The statement STMT is executed at most ...  */
  gimple stmt;

  /* ... BOUND + 1 times (BOUND must be an unsigned constant).
     The + 1 is added for the following reasons:

     a) a bound of 0 would otherwise be unused, and we would have to care
        more about overflows (as MAX + 1 is sometimes produced as the
        estimate of the number of executions of STMT).
     b) it is consistent with the result of number_of_iterations_exit.  */
  double_int bound;

  /* True if the statement will cause the loop to be exited the (at most)
     BOUND + 1-st time it is executed, that is, all the statements after it
     are executed at most BOUND times.  */
  bool is_exit;

  /* The next bound in the list.  */
  struct nb_iter_bound *next;
};
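
/* For example (purely illustrative, not taken from any particular pass):
   a record stating that STMT is known to execute at most 16 times would
   have BOUND = 15; if IS_EXIT is also true, every statement after STMT in
   the loop body is executed at most 15 times.  */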

/* Description of the loop exit.  */

struct GTY (()) loop_exit {
  /* The exit edge.  */
  edge e;

  /* Previous and next exit in the list of the exits of the loop.  */
  struct loop_exit *prev;
  struct loop_exit *next;

  /* Next element in the list of loops from which E exits.  */
  struct loop_exit *next_e;
};

typedef struct loop *loop_p;

/* The state of an estimation of the number of iterations of a loop.  */
enum loop_estimation
{
  /* Estimate was not computed yet.  */
  EST_NOT_COMPUTED,
  /* Estimate is ready.  */
  EST_AVAILABLE
};

/* Structure to hold information for each natural loop.  */
struct GTY ((chain_next ("%h.next"))) loop {
  /* Index into loops array.  */
  int num;

  /* Number of loop insns.  */
  unsigned ninsns;

  /* Basic block of loop header.  */
  basic_block header;

  /* Basic block of loop latch.  */
  basic_block latch;

  /* For loop unrolling/peeling decision.  */
  struct lpt_decision lpt_decision;

  /* Average number of executed insns per iteration.  */
  unsigned av_ninsns;

  /* Number of blocks contained within the loop.  */
  unsigned num_nodes;

  /* Superloops of the loop, starting with the outermost loop.  */
  vec<loop_p, va_gc> *superloops;

  /* The first inner (child) loop or NULL if innermost loop.  */
  struct loop *inner;

  /* Link to the next (sibling) loop.  */
  struct loop *next;

  /* Auxiliary info specific to a pass.  */
  PTR GTY ((skip (""))) aux;

  /* The number of times the latch of the loop is executed.  This can be an
     INTEGER_CST, or a symbolic expression representing the number of
     iterations like "N - 1", or a COND_EXPR containing the runtime
     conditions under which the number of iterations is nonzero.

     Don't access this field directly: number_of_latch_executions
     computes and caches the value in this field.  */
  tree nb_iterations;

  /* An integer guaranteed to be greater than or equal to nb_iterations.
     Only valid if any_upper_bound is true.  */
  double_int nb_iterations_upper_bound;

  /* An integer giving an estimate of nb_iterations.  Unlike
     nb_iterations_upper_bound, there is no guarantee that it is at least
     nb_iterations.  */
  double_int nb_iterations_estimate;

  /* True if nb_iterations_upper_bound is valid.  */
  bool any_upper_bound;
  /* True if nb_iterations_estimate is valid.  */
  bool any_estimate;

  /* True if the loop can be parallel.  */
  bool can_be_parallel;

  /* True if -Waggressive-loop-optimizations warned about this loop
     already.  */
  bool warned_aggressive_loop_optimizations;

  /* The state of the estimation of the number of iterations (see
     enum loop_estimation above).  */
  enum loop_estimation estimate_state;

  /* Upper bounds on the number of iterations of the loop, recorded as a
     list of nb_iter_bound structures.  */
  struct nb_iter_bound *bounds;

  /* Head of the cyclic list of the exits of the loop.  */
  struct loop_exit *exits;
};

/* Flags for state of loop structure.  */
enum
{
  LOOPS_HAVE_PREHEADERS = 1,
  LOOPS_HAVE_SIMPLE_LATCHES = 2,
  LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS = 4,
  LOOPS_HAVE_RECORDED_EXITS = 8,
  LOOPS_MAY_HAVE_MULTIPLE_LATCHES = 16,
  LOOP_CLOSED_SSA = 32,
  LOOPS_NEED_FIXUP = 64,
  LOOPS_HAVE_FALLTHRU_PREHEADERS = 128
};

#define LOOPS_NORMAL (LOOPS_HAVE_PREHEADERS | LOOPS_HAVE_SIMPLE_LATCHES \
		      | LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)
#define AVOID_CFG_MODIFICATIONS (LOOPS_MAY_HAVE_MULTIPLE_LATCHES)
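
/* As an illustrative sketch (not a prescription): a pass that wants the
   loop tree with preheaders, simple latches, irreducible regions marked
   and the exit edges recorded would typically request

     loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
     ...
     loop_optimizer_finalize ();

   using the loop optimizer entry points declared near the end of this
   file.  */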

/* Structure to hold CFG information about natural loops within a function.  */
struct GTY (()) loops {
  /* State of the loops (a combination of the LOOPS_* flags above).  */
  int state;

  /* Array of the loops.  */
  vec<loop_p, va_gc> *larray;

  /* Maps edges to the list of their descriptions as loop exits.  Edges
     whose sources or destinations have loop_father == NULL (which may
     happen during CFG manipulations) should not appear in EXITS.  */
  htab_t GTY((param_is (struct loop_exit))) exits;

  /* Pointer to the root of the loop hierarchy tree.  */
  struct loop *tree_root;
};

/* Loop recognition.  */
bool bb_loop_header_p (basic_block);
extern struct loops *flow_loops_find (struct loops *);
extern void disambiguate_loops_with_multiple_latches (void);
extern void flow_loops_free (struct loops *);
extern void flow_loops_dump (FILE *,
			     void (*)(const struct loop *, FILE *, int), int);
extern void flow_loop_dump (const struct loop *, FILE *,
			    void (*)(const struct loop *, FILE *, int), int);
struct loop *alloc_loop (void);
extern void flow_loop_free (struct loop *);
int flow_loop_nodes_find (basic_block, struct loop *);
unsigned fix_loop_structure (bitmap changed_bbs);
bool mark_irreducible_loops (void);
void release_recorded_exits (void);
void record_loop_exits (void);
void rescan_loop_exit (edge, bool, bool);

/* Loop data structure manipulation/querying.  */
extern void flow_loop_tree_node_add (struct loop *, struct loop *);
extern void flow_loop_tree_node_remove (struct loop *);
extern void add_loop (struct loop *, struct loop *);
extern bool flow_loop_nested_p (const struct loop *, const struct loop *);
extern bool flow_bb_inside_loop_p (const struct loop *, const_basic_block);
extern struct loop * find_common_loop (struct loop *, struct loop *);
struct loop *superloop_at_depth (struct loop *, unsigned);
struct eni_weights_d;
extern unsigned tree_num_loop_insns (struct loop *, struct eni_weights_d *);
extern int num_loop_insns (const struct loop *);
extern int average_num_loop_insns (const struct loop *);
extern unsigned get_loop_level (const struct loop *);
extern bool loop_exit_edge_p (const struct loop *, const_edge);
extern bool loop_exits_to_bb_p (struct loop *, basic_block);
extern bool loop_exits_from_bb_p (struct loop *, basic_block);
extern void mark_loop_exit_edges (void);
extern location_t get_loop_location (struct loop *loop);

/* Loops & cfg manipulation.  */
extern basic_block *get_loop_body (const struct loop *);
extern unsigned get_loop_body_with_size (const struct loop *, basic_block *,
					 unsigned);
extern basic_block *get_loop_body_in_dom_order (const struct loop *);
extern basic_block *get_loop_body_in_bfs_order (const struct loop *);
extern basic_block *get_loop_body_in_custom_order (const struct loop *,
			       int (*) (const void *, const void *));

extern vec<edge> get_loop_exit_edges (const struct loop *);
extern edge single_exit (const struct loop *);
extern edge single_likely_exit (struct loop *loop);
extern unsigned num_loop_branches (const struct loop *);

extern edge loop_preheader_edge (const struct loop *);
extern edge loop_latch_edge (const struct loop *);

extern void add_bb_to_loop (basic_block, struct loop *);
extern void remove_bb_from_loops (basic_block);

extern void cancel_loop_tree (struct loop *);
extern void delete_loop (struct loop *);

enum
{
  CP_SIMPLE_PREHEADERS = 1,
  CP_FALLTHRU_PREHEADERS = 2
};

basic_block create_preheader (struct loop *, int);
extern void create_preheaders (int);
extern void force_single_succ_latches (void);

extern void verify_loop_structure (void);

/* Loop analysis.  */
extern bool just_once_each_iteration_p (const struct loop *, const_basic_block);
gcov_type expected_loop_iterations_unbounded (const struct loop *);
extern unsigned expected_loop_iterations (const struct loop *);
extern rtx doloop_condition_get (rtx);

void estimate_numbers_of_iterations_loop (struct loop *);
void record_niter_bound (struct loop *, double_int, bool, bool);
bool estimated_loop_iterations (struct loop *, double_int *);
bool max_loop_iterations (struct loop *, double_int *);
HOST_WIDE_INT estimated_loop_iterations_int (struct loop *);
HOST_WIDE_INT max_loop_iterations_int (struct loop *);
bool max_stmt_executions (struct loop *, double_int *);
bool estimated_stmt_executions (struct loop *, double_int *);
HOST_WIDE_INT max_stmt_executions_int (struct loop *);
HOST_WIDE_INT estimated_stmt_executions_int (struct loop *);

/* Loop manipulation.  */
extern bool can_duplicate_loop_p (const struct loop *loop);

#define DLTHE_FLAG_UPDATE_FREQ	1	/* Update frequencies in
					   duplicate_loop_to_header_edge.  */
#define DLTHE_RECORD_COPY_NUMBER 2	/* Record copy number in the aux
					   field of newly created BBs.  */
#define DLTHE_FLAG_COMPLETTE_PEEL 4	/* Update frequencies expecting
					   a complete peeling.  */

extern edge create_empty_if_region_on_edge (edge, tree);
extern struct loop *create_empty_loop_on_edge (edge, tree, tree, tree, tree,
					       tree *, tree *, struct loop *);
extern struct loop * duplicate_loop (struct loop *, struct loop *);
extern void copy_loop_info (struct loop *loop, struct loop *target);
extern void duplicate_subloops (struct loop *, struct loop *);
extern bool duplicate_loop_to_header_edge (struct loop *, edge,
					   unsigned, sbitmap, edge,
					   vec<edge> *, int);
extern struct loop *loopify (edge, edge,
			     basic_block, edge, edge, bool,
			     unsigned, unsigned);
struct loop * loop_version (struct loop *, void *,
			    basic_block *, unsigned, unsigned, unsigned, bool);
extern bool remove_path (edge);
extern void unloop (struct loop *, bool *, bitmap);
extern void scale_loop_frequencies (struct loop *, int, int);

/* Induction variable analysis.  */

/* The description of an induction variable.  Things are a bit complicated
   due to the need to handle subregs and extends.  The value of the object
   described by it can be obtained as follows (all computations are done in
   extend_mode):

   The value in the i-th iteration is
     delta + mult * extend_{extend_mode} (subreg_{mode} (base + i * step)).

   If first_special is true, the value in the first iteration is
     delta + mult * base

   If extend = IV_UNKNOWN_EXTEND, first_special must be false, delta must be
   0, mult must be 1, and the value is
     subreg_{mode} (base + i * step)

   The get_iv_value function can be used to obtain these expressions.

   ??? Add a third mode field that would specify the mode in which the inner
   computation is done, which would enable it to be different from the
   outer one?  */

struct rtx_iv
{
  /* The base and step (the mode of both base and step is supposed to be
     extend_mode; see the description above).  */
  rtx base, step;

  /* The type of extend applied to it (IV_SIGN_EXTEND, IV_ZERO_EXTEND,
     or IV_UNKNOWN_EXTEND).  */
  enum iv_extend_code extend;

  /* Operations applied in the extended mode.  */
  rtx delta, mult;

  /* The mode it is extended to.  */
  enum machine_mode extend_mode;

  /* The mode the variable iterates in.  */
  enum machine_mode mode;

  /* Whether the first iteration needs to be handled specially.  */
  unsigned first_special : 1;
};
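
/* A purely illustrative example (not taken from any particular pass):
   an induction variable iterating in QImode as 10, 11, 12, ... inside
   computations performed in SImode could be described by

     base = (const_int 10), step = (const_int 1),
     mode = QImode, extend_mode = SImode, extend = IV_ZERO_EXTEND,
     delta = (const_int 0), mult = (const_int 1), first_special = 0,

   so that the value in the i-th iteration is
     0 + 1 * zero_extend_{SImode} (subreg_{QImode} (10 + i * 1)).  */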

/* The description of an exit from the loop and of the number of iterations
   until we take the exit.  */

struct niter_desc
{
  /* The edge out of the loop.  */
  edge out_edge;

  /* The other edge leading from the condition.  */
  edge in_edge;

  /* True if we are able to say anything about the number of iterations of
     the loop.  */
  bool simple_p;

  /* True if the loop iterates a constant number of times.  */
  bool const_iter;

  /* Number of iterations if constant.  */
  unsigned HOST_WIDEST_INT niter;

  /* Assumptions under which the rest of the information is valid.  */
  rtx assumptions;

  /* Assumptions under which the loop ends before reaching the latch,
     even if the value of niter_expr says otherwise.  */
  rtx noloop_assumptions;

  /* Condition under which the loop is infinite.  */
  rtx infinite;

  /* Whether the comparison is signed.  */
  bool signed_p;

  /* The mode in which niter_expr should be computed.  */
  enum machine_mode mode;

  /* The number of iterations of the loop.  */
  rtx niter_expr;
};

extern void iv_analysis_loop_init (struct loop *);
extern bool iv_analyze (rtx, rtx, struct rtx_iv *);
extern bool iv_analyze_result (rtx, rtx, struct rtx_iv *);
extern bool iv_analyze_expr (rtx, rtx, enum machine_mode, struct rtx_iv *);
extern rtx get_iv_value (struct rtx_iv *, rtx);
extern bool biv_p (rtx, rtx);
extern void find_simple_exit (struct loop *, struct niter_desc *);
extern void iv_analysis_done (void);

extern struct niter_desc *get_simple_loop_desc (struct loop *loop);
extern void free_simple_loop_desc (struct loop *loop);

static inline struct niter_desc *
simple_loop_desc (struct loop *loop)
{
  return (struct niter_desc *) loop->aux;
}
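
/* An illustrative sketch (hypothetical caller, not copied from a real pass)
   of how an RTL loop pass might use the simple-exit information:

     struct niter_desc *desc = get_simple_loop_desc (loop);

     if (desc->simple_p && desc->const_iter)
       ...the loop is known to iterate desc->niter times...  */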

/* Accessors for the loop structures.  */

/* Returns the loop with index NUM from current_loops.  */

static inline struct loop *
get_loop (unsigned num)
{
  return (*current_loops->larray)[num];
}

/* Returns the number of superloops of LOOP.  */

static inline unsigned
loop_depth (const struct loop *loop)
{
  return vec_safe_length (loop->superloops);
}

/* Returns the loop depth of the loop BB belongs to.  */

static inline int
bb_loop_depth (const_basic_block bb)
{
  return bb->loop_father ? loop_depth (bb->loop_father) : 0;
}

/* Returns the immediate superloop of LOOP, or NULL if LOOP is the outermost
   loop.  */

static inline struct loop *
loop_outer (const struct loop *loop)
{
  unsigned n = vec_safe_length (loop->superloops);

  if (n == 0)
    return NULL;

  return (*loop->superloops)[n - 1];
}
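
/* An illustrative note (sketch only): since LOOP->superloops records the
   enclosing loops from outermost to innermost, loop_depth (loop) equals the
   number of loop_outer steps needed to reach the root of the loop tree:

     unsigned depth = 0;
     const struct loop *l;

     for (l = loop; loop_outer (l) != NULL; l = loop_outer (l))
       depth++;

   leaves DEPTH equal to loop_depth (loop).  */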

/* Returns true if LOOP has at least one exit edge.  */

static inline bool
loop_has_exit_edges (const struct loop *loop)
{
  return loop->exits->next->e != NULL;
}

/* Returns the list of loops in current_loops.  */

static inline vec<loop_p, va_gc> *
get_loops (void)
{
  if (!current_loops)
    return NULL;

  return current_loops->larray;
}

/* Returns the number of loops in current_loops (including the removed
   ones and the fake loop that forms the root of the loop tree).  */

static inline unsigned
number_of_loops (void)
{
  if (!current_loops)
    return 0;

  return vec_safe_length (current_loops->larray);
}

/* Returns true if the state of the loops satisfies all the properties
   described by FLAGS.  */

static inline bool
loops_state_satisfies_p (unsigned flags)
{
  return (current_loops->state & flags) == flags;
}
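
/* For instance (an illustrative sketch), a transformation that relies on
   preheaders and recorded exits being available might guard itself with

     if (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS
                                  | LOOPS_HAVE_RECORDED_EXITS))
       ...perform the transformation...  */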

/* Sets FLAGS in the loops state.  */

static inline void
loops_state_set (unsigned flags)
{
  current_loops->state |= flags;
}

/* Clears FLAGS from the loops state.  */

static inline void
loops_state_clear (unsigned flags)
{
  if (!current_loops)
    return;
  current_loops->state &= ~flags;
}

/* Loop iterators.  */

/* Flags for loop iteration.  */

enum li_flags
{
  LI_INCLUDE_ROOT = 1,		/* Include the fake root of the loop tree.  */
  LI_FROM_INNERMOST = 2,	/* Iterate over the loops in the reverse order,
				   starting from innermost ones.  */
  LI_ONLY_INNERMOST = 4		/* Iterate only over innermost loops.  */
};

/* The iterator for loops.  */

typedef struct
{
  /* The list of loops to visit.  */
  vec<int> to_visit;

  /* The index of the next loop to visit in TO_VISIT.  */
  unsigned idx;
} loop_iterator;

/* Set *LOOP to the next loop to visit according to LI, or to NULL when the
   iteration is finished, and advance the iterator.  */

static inline void
fel_next (loop_iterator *li, loop_p *loop)
{
  int anum;

  while (li->to_visit.iterate (li->idx, &anum))
    {
      li->idx++;
      *loop = get_loop (anum);
      if (*loop)
	return;
    }

  li->to_visit.release ();
  *loop = NULL;
}

/* Initialize the loop iterator LI over the loops of current_loops according
   to FLAGS (a combination of enum li_flags) and set *LOOP to the first loop
   to visit, or to NULL if there is none.  */

static inline void
fel_init (loop_iterator *li, loop_p *loop, unsigned flags)
{
  struct loop *aloop;
  unsigned i;
  int mn;

  li->idx = 0;
  if (!current_loops)
    {
      li->to_visit.create (0);
      *loop = NULL;
      return;
    }

  li->to_visit.create (number_of_loops ());
  mn = (flags & LI_INCLUDE_ROOT) ? 0 : 1;

  if (flags & LI_ONLY_INNERMOST)
    {
      for (i = 0; vec_safe_iterate (current_loops->larray, i, &aloop); i++)
	if (aloop != NULL
	    && aloop->inner == NULL
	    && aloop->num >= mn)
	  li->to_visit.quick_push (aloop->num);
    }
  else if (flags & LI_FROM_INNERMOST)
    {
      /* Push the loops to LI->TO_VISIT in postorder.  */
      for (aloop = current_loops->tree_root;
	   aloop->inner != NULL;
	   aloop = aloop->inner)
	continue;

      while (1)
	{
	  if (aloop->num >= mn)
	    li->to_visit.quick_push (aloop->num);

	  if (aloop->next)
	    {
	      for (aloop = aloop->next;
		   aloop->inner != NULL;
		   aloop = aloop->inner)
		continue;
	    }
	  else if (!loop_outer (aloop))
	    break;
	  else
	    aloop = loop_outer (aloop);
	}
    }
  else
    {
      /* Push the loops to LI->TO_VISIT in preorder.  */
      aloop = current_loops->tree_root;
      while (1)
	{
	  if (aloop->num >= mn)
	    li->to_visit.quick_push (aloop->num);

	  if (aloop->inner != NULL)
	    aloop = aloop->inner;
	  else
	    {
	      while (aloop != NULL && aloop->next == NULL)
		aloop = loop_outer (aloop);
	      if (aloop == NULL)
		break;
	      aloop = aloop->next;
	    }
	}
    }

  fel_next (li, loop);
}

#define FOR_EACH_LOOP(LI, LOOP, FLAGS) \
  for (fel_init (&(LI), &(LOOP), FLAGS); \
       (LOOP); \
       fel_next (&(LI), &(LOOP)))

#define FOR_EACH_LOOP_BREAK(LI) \
  { \
    (LI).to_visit.release (); \
    break; \
  }
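
/* An illustrative sketch of the usual iteration pattern (the loop body and
   the optimize_this_loop_p predicate are made up):

     loop_iterator li;
     struct loop *loop;

     FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
       {
         if (!optimize_this_loop_p (loop))
           FOR_EACH_LOOP_BREAK (li);
         ...process LOOP...
       }

   FOR_EACH_LOOP_BREAK must be used instead of a plain break so that the
   iterator's TO_VISIT vector is released.  */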

/* The properties of the target.  */
struct target_cfgloop {
  /* Number of available registers.  */
  unsigned x_target_avail_regs;

  /* Number of available registers that are call-clobbered.  */
  unsigned x_target_clobbered_regs;

  /* Number of registers reserved for temporary expressions.  */
  unsigned x_target_res_regs;

  /* The cost of a register when there is still some reserve, but we are
     approaching the number of available registers.  */
  unsigned x_target_reg_cost[2];

  /* The cost of a register when we need to spill.  */
  unsigned x_target_spill_cost[2];
};

extern struct target_cfgloop default_target_cfgloop;
#if SWITCHABLE_TARGET
extern struct target_cfgloop *this_target_cfgloop;
#else
#define this_target_cfgloop (&default_target_cfgloop)
#endif

#define target_avail_regs \
  (this_target_cfgloop->x_target_avail_regs)
#define target_clobbered_regs \
  (this_target_cfgloop->x_target_clobbered_regs)
#define target_res_regs \
  (this_target_cfgloop->x_target_res_regs)
#define target_reg_cost \
  (this_target_cfgloop->x_target_reg_cost)
#define target_spill_cost \
  (this_target_cfgloop->x_target_spill_cost)

/* Register pressure estimation for induction variable optimizations & loop
   invariant motion.  */
extern unsigned estimate_reg_pressure_cost (unsigned, unsigned, bool, bool);
extern void init_set_costs (void);

/* Loop optimizer initialization.  */
extern void loop_optimizer_init (unsigned);
extern void loop_optimizer_finalize (void);

/* Optimization passes.  */
extern void unswitch_loops (void);

enum
{
  UAP_PEEL = 1,		/* Enables loop peeling.  */
  UAP_UNROLL = 2,	/* Enables unrolling of loops if it seems profitable.  */
  UAP_UNROLL_ALL = 4	/* Enables unrolling of all loops.  */
};

extern void unroll_and_peel_loops (int);
extern void doloop_optimize_loops (void);
extern void move_loop_invariants (void);
extern bool finite_loop_p (struct loop *);
extern void scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound);
extern vec<basic_block> get_loop_hot_path (const struct loop *loop);

/* Returns the outermost loop of the loop nest that contains LOOP.  */
static inline struct loop *
loop_outermost (struct loop *loop)
{
  unsigned n = vec_safe_length (loop->superloops);

  if (n <= 1)
    return loop;

  return (*loop->superloops)[1];
}
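
/* A purely illustrative example: for a nest root -> A -> B -> LOOP (where
   root is the fake loop at the root of the loop tree), LOOP->superloops
   contains { root, A, B } and loop_outermost (LOOP) returns A; for a
   top-level loop, whose only superloop is root, it returns the loop
   itself.  */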


#endif /* GCC_CFGLOOP_H */