GCC Middle and Back End API Reference
cfgloop.h
1 /* Natural loop functions
2  Copyright (C) 1987-2013 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19 
20 #ifndef GCC_CFGLOOP_H
21 #define GCC_CFGLOOP_H
22 
23 #include "basic-block.h"
24 #include "double-int.h"
25 
26 #include "bitmap.h"
27 #include "sbitmap.h"
28 
29 /* Structure to hold decision about unrolling/peeling. */
30 enum lpt_dec
31 {
32  LPT_NONE,
33  LPT_PEEL_COMPLETELY,
34  LPT_PEEL_SIMPLE,
35  LPT_UNROLL_CONSTANT,
36  LPT_UNROLL_RUNTIME,
37  LPT_UNROLL_STUPID
38 };
39 
40 struct GTY (()) lpt_decision {
41  enum lpt_dec decision;
42  unsigned times;
43 };
44 
45 /* The type of extend applied to an IV. */
46 enum iv_extend_code
47 {
48  IV_SIGN_EXTEND,
49  IV_ZERO_EXTEND,
50  IV_UNKNOWN_EXTEND
51 };
52 
53 /* The structure describing a bound on number of iterations of a loop. */
54 
55 struct GTY ((chain_next ("%h.next"))) nb_iter_bound {
56  /* The statement STMT is executed at most ... */
57  gimple stmt;
58 
59  /* ... BOUND + 1 times (BOUND must be an unsigned constant).
60  The + 1 is added for the following reasons:
61 
62  a) 0 would otherwise be unused, while we would need to care more about
63  overflows (as MAX + 1 is sometimes produced as the estimate on number
64  of executions of STMT).
65  b) it is consistent with the result of number_of_iterations_exit. */
66  double_int bound;
67 
68  /* True if the statement will cause the loop to be exited the (at most)
69  BOUND + 1-st time it is executed, that is, all the statements after it
70  are executed at most BOUND times. */
71  bool is_exit;
72 
73  /* The next bound in the list. */
74  struct nb_iter_bound *next;
75 };
76 
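/* Worked example (editorial, not part of the original header): for a source
   loop such as

     for (i = 0; i < 128; i++)
       a[i] = i;

   whose exit test is checked before each iteration, the test is executed at
   most 129 times, so the nb_iter_bound record attached to it has
   BOUND == 128 and IS_EXIT == true, while the record for the body statement
   has BOUND == 127 and IS_EXIT == false, since the body runs at most 128
   times.  */
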
77 /* Description of the loop exit. */
78 
79 struct GTY (()) loop_exit {
80  /* The exit edge. */
81  edge e;
82 
83  /* Previous and next exit in the list of the exits of the loop. */
84  struct loop_exit *prev;
85  struct loop_exit *next;
86 
87  /* Next element in the list of loops from which E exits. */
88  struct loop_exit *next_e;
89 };
90 
91 typedef struct loop *loop_p;
92 
93 /* An integer estimate of the number of iterations. Estimate_state
94  describes the state of the estimation. */
95 enum loop_estimation
96 {
97  /* Estimate was not computed yet. */
98  EST_NOT_COMPUTED,
99  /* Estimate is ready. */
100  EST_AVAILABLE,
101  EST_LAST
102 };
103 
104 /* Structure to hold information for each natural loop. */
105 struct GTY ((chain_next ("%h.next"))) loop {
106  /* Index into loops array. */
107  int num;
108 
109  /* Number of loop insns. */
110  unsigned ninsns;
111 
112  /* Basic block of loop header. */
113  basic_block header;
114 
115  /* Basic block of loop latch. */
116  basic_block latch;
117 
118  /* For loop unrolling/peeling decision. */
119  struct lpt_decision lpt_decision;
120 
121  /* Average number of executed insns per iteration. */
122  unsigned av_ninsns;
123 
124  /* Number of blocks contained within the loop. */
125  unsigned num_nodes;
126 
127  /* Superloops of the loop, starting with the outermost loop. */
128  vec<loop_p, va_gc> *superloops;
129 
130  /* The first inner (child) loop or NULL if innermost loop. */
131  struct loop *inner;
132 
133  /* Link to the next (sibling) loop. */
134  struct loop *next;
135 
136  /* Auxiliary info specific to a pass. */
137  PTR GTY ((skip (""))) aux;
138 
139  /* The number of times the latch of the loop is executed. This can be an
140  INTEGER_CST, or a symbolic expression representing the number of
141  iterations like "N - 1", or a COND_EXPR containing the runtime
142  conditions under which the number of iterations is nonzero.
143 
144  Don't access this field directly: number_of_latch_executions
145  computes and caches this information in this field. */
146  tree nb_iterations;
147 
148  /* An integer guaranteed to be greater than or equal to nb_iterations.
149  Only valid if any_upper_bound is true. */
150  double_int nb_iterations_upper_bound;
151 
152  /* An integer giving an estimate on nb_iterations. Unlike
153  nb_iterations_upper_bound, there is no guarantee that it is at least
154  nb_iterations. */
155  double_int nb_iterations_estimate;
156 
157  bool any_upper_bound;
158  bool any_estimate;
159 
160  /* True if the loop can be parallel. */
161  bool can_be_parallel;
162 
163  /* True if -Waggressive-loop-optimizations warned about this loop
164  already. */
165  bool warned_aggressive_loop_optimizations;
166 
167  /* An integer estimate of the number of iterations. Estimate_state
168  describes the state of the estimation. */
169  enum loop_estimation estimate_state;
170 
171  /* If > 0, an integer, where the user asserted that for any
172  I in [ 0, nb_iterations ) and for any J in
173  [ I, min ( I + safelen, nb_iterations ) ), the Ith and Jth iterations
174  of the loop can be safely evaluated concurrently. */
175  int safelen;
176 
177  /* True if we should try harder to vectorize this loop. */
178  bool force_vect;
179 
180  /* For SIMD loops, this is a unique identifier of the loop, referenced
181  by IFN_GOMP_SIMD_VF, IFN_GOMP_SIMD_LANE and IFN_GOMP_SIMD_LAST_LANE
182  builtins. */
183  tree simduid;
184 
185  /* Upper bound on number of iterations of a loop. */
186  struct nb_iter_bound *bounds;
187 
188  /* Head of the cyclic list of the exits of the loop. */
189  struct loop_exit *exits;
190 
191  /* The number-of-iterations analysis data for RTL. */
192  struct niter_desc *simple_loop_desc;
193 };
194 
195 /* Flags for state of loop structure. */
196 enum
197 {
198  LOOPS_HAVE_PREHEADERS = 1,
199  LOOPS_HAVE_SIMPLE_LATCHES = 2,
200  LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS = 4,
201  LOOPS_HAVE_RECORDED_EXITS = 8,
202  LOOPS_MAY_HAVE_MULTIPLE_LATCHES = 16,
203  LOOP_CLOSED_SSA = 32,
204  LOOPS_NEED_FIXUP = 64,
205  LOOPS_HAVE_FALLTHRU_PREHEADERS = 128
206 };
207 
208 #define LOOPS_NORMAL (LOOPS_HAVE_PREHEADERS | LOOPS_HAVE_SIMPLE_LATCHES \
209  | LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)
210 #define AVOID_CFG_MODIFICATIONS (LOOPS_MAY_HAVE_MULTIPLE_LATCHES)
211 
212 /* Structure to hold CFG information about natural loops within a function. */
213 struct GTY (()) loops {
214  /* State of loops. */
215  int state;
216 
217  /* Array of the loops. */
218  vec<loop_p, va_gc> *larray;
219 
220  /* Maps edges to the list of their descriptions as loop exits. Edges
221  whose sources or destinations have loop_father == NULL (which may
222  happen during the cfg manipulations) should not appear in EXITS. */
223  htab_t GTY((param_is (struct loop_exit))) exits;
224 
225  /* Pointer to root of loop hierarchy tree. */
226  struct loop *tree_root;
227 };
228 
229 /* Loop recognition. */
230 bool bb_loop_header_p (basic_block);
231 void init_loops_structure (struct function *, struct loops *, unsigned);
232 extern struct loops *flow_loops_find (struct loops *);
233 extern void disambiguate_loops_with_multiple_latches (void);
234 extern void flow_loops_free (struct loops *);
235 extern void flow_loops_dump (FILE *,
236  void (*)(const struct loop *, FILE *, int), int);
237 extern void flow_loop_dump (const struct loop *, FILE *,
238  void (*)(const struct loop *, FILE *, int), int);
239 struct loop *alloc_loop (void);
240 extern void flow_loop_free (struct loop *);
241 int flow_loop_nodes_find (basic_block, struct loop *);
242 unsigned fix_loop_structure (bitmap changed_bbs);
243 bool mark_irreducible_loops (void);
244 void release_recorded_exits (void);
245 void record_loop_exits (void);
246 void rescan_loop_exit (edge, bool, bool);
247 
248 /* Loop data structure manipulation/querying. */
249 extern void flow_loop_tree_node_add (struct loop *, struct loop *);
250 extern void flow_loop_tree_node_remove (struct loop *);
251 extern void place_new_loop (struct function *, struct loop *);
252 extern void add_loop (struct loop *, struct loop *);
253 extern bool flow_loop_nested_p (const struct loop *, const struct loop *);
254 extern bool flow_bb_inside_loop_p (const struct loop *, const_basic_block);
255 extern struct loop * find_common_loop (struct loop *, struct loop *);
256 struct loop *superloop_at_depth (struct loop *, unsigned);
257 struct eni_weights_d;
258 extern unsigned tree_num_loop_insns (struct loop *, struct eni_weights_d *);
259 extern int num_loop_insns (const struct loop *);
260 extern int average_num_loop_insns (const struct loop *);
261 extern unsigned get_loop_level (const struct loop *);
262 extern bool loop_exit_edge_p (const struct loop *, const_edge);
263 extern bool loop_exits_to_bb_p (struct loop *, basic_block);
264 extern bool loop_exits_from_bb_p (struct loop *, basic_block);
265 extern void mark_loop_exit_edges (void);
266 extern location_t get_loop_location (struct loop *loop);
267 
268 /* Loops & cfg manipulation. */
269 extern basic_block *get_loop_body (const struct loop *);
270 extern unsigned get_loop_body_with_size (const struct loop *, basic_block *,
271  unsigned);
272 extern basic_block *get_loop_body_in_dom_order (const struct loop *);
273 extern basic_block *get_loop_body_in_bfs_order (const struct loop *);
274 extern basic_block *get_loop_body_in_custom_order (const struct loop *,
275  int (*) (const void *, const void *));
276 
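/* Illustrative usage sketch (editorial addition; the helper name is made
   up): get_loop_body returns a heap-allocated array of LOOP->num_nodes
   basic blocks that the caller must free.  */

static inline void
example_print_loop_body (const struct loop *loop, FILE *file)
{
  basic_block *body = get_loop_body (loop);  /* All blocks of LOOP.  */
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    fprintf (file, " bb %d", body[i]->index);
  fprintf (file, "\n");

  free (body);  /* The array is owned by the caller.  */
}
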
277 extern vec<edge> get_loop_exit_edges (const struct loop *);
278 extern edge single_exit (const struct loop *);
279 extern edge single_likely_exit (struct loop *loop);
280 extern unsigned num_loop_branches (const struct loop *);
281 
282 extern edge loop_preheader_edge (const struct loop *);
283 extern edge loop_latch_edge (const struct loop *);
284 
285 extern void add_bb_to_loop (basic_block, struct loop *);
286 extern void remove_bb_from_loops (basic_block);
287 
288 extern void cancel_loop_tree (struct loop *);
289 extern void delete_loop (struct loop *);
290 
291 enum
292 {
293  CP_SIMPLE_PREHEADERS = 1,
294  CP_FALLTHRU_PREHEADERS = 2
295 };
296 
297 basic_block create_preheader (struct loop *, int);
298 extern void create_preheaders (int);
299 extern void force_single_succ_latches (void);
300 
301 extern void verify_loop_structure (void);
302 
303 /* Loop analysis. */
304 extern bool just_once_each_iteration_p (const struct loop *, const_basic_block);
305 gcov_type expected_loop_iterations_unbounded (const struct loop *);
306 extern unsigned expected_loop_iterations (const struct loop *);
307 extern rtx doloop_condition_get (rtx);
308 
309 void estimate_numbers_of_iterations_loop (struct loop *);
310 void record_niter_bound (struct loop *, double_int, bool, bool);
311 bool estimated_loop_iterations (struct loop *, double_int *);
312 bool max_loop_iterations (struct loop *, double_int *);
313 HOST_WIDE_INT estimated_loop_iterations_int (struct loop *);
314 HOST_WIDE_INT max_loop_iterations_int (struct loop *);
315 bool max_stmt_executions (struct loop *, double_int *);
316 bool estimated_stmt_executions (struct loop *, double_int *);
317 HOST_WIDE_INT max_stmt_executions_int (struct loop *);
318 HOST_WIDE_INT estimated_stmt_executions_int (struct loop *);
319 
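/* Illustrative sketch (editorial; the helper name is made up): the *_int
   variants above are assumed to return -1 when no estimate or bound that
   fits a HOST_WIDE_INT is known, so a small-trip-count test can be written
   as follows.  */

static inline bool
example_small_trip_count_p (struct loop *loop, HOST_WIDE_INT limit)
{
  HOST_WIDE_INT est = estimated_loop_iterations_int (loop);

  if (est < 0)          /* No usable estimate recorded.  */
    return false;

  return est <= limit;  /* Estimated latch executions do not exceed LIMIT.  */
}
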
320 /* Loop manipulation. */
321 extern bool can_duplicate_loop_p (const struct loop *loop);
322 
323 #define DLTHE_FLAG_UPDATE_FREQ 1 /* Update frequencies in
324  duplicate_loop_to_header_edge. */
325 #define DLTHE_RECORD_COPY_NUMBER 2 /* Record copy number in the aux
326  field of a newly created BB. */
327 #define DLTHE_FLAG_COMPLETTE_PEEL 4 /* Update frequencies expecting
328  a complete peeling. */
329 
330 extern edge create_empty_if_region_on_edge (edge, tree);
331 extern struct loop *create_empty_loop_on_edge (edge, tree, tree, tree, tree,
332  tree *, tree *, struct loop *);
333 extern struct loop * duplicate_loop (struct loop *, struct loop *);
334 extern void copy_loop_info (struct loop *loop, struct loop *target);
335 extern void duplicate_subloops (struct loop *, struct loop *);
336 extern bool duplicate_loop_to_header_edge (struct loop *, edge,
337  unsigned, sbitmap, edge,
338  vec<edge> *, int);
339 extern struct loop *loopify (edge, edge,
340  basic_block, edge, edge, bool,
341  unsigned, unsigned);
342 struct loop * loop_version (struct loop *, void *,
343  basic_block *, unsigned, unsigned, unsigned, bool);
344 extern bool remove_path (edge);
345 extern void unloop (struct loop *, bool *, bitmap);
346 extern void scale_loop_frequencies (struct loop *, int, int);
347 
348 /* Induction variable analysis. */
349 
350 /* The description of an induction variable. Things are a bit complicated
351  due to the need to handle subregs and extends. The value of the object described
352  by it can be obtained as follows (all computations are done in extend_mode):
353 
354  The value in the i-th iteration is
355  delta + mult * extend_{extend_mode} (subreg_{mode} (base + i * step)).
356 
357  If first_special is true, the value in the first iteration is
358  delta + mult * base
359 
360  If extend = UNKNOWN, first_special must be false, delta 0, mult 1, and the value is
361  subreg_{mode} (base + i * step)
362 
363  The get_iv_value function can be used to obtain these expressions.
364 
365  ??? Add a third mode field that would specify the mode in which the inner
366  computation is done, which would enable it to be different from the
367  outer one? */
368 
369 struct rtx_iv
370 {
371  /* Its base and step (mode of base and step is supposed to be extend_mode,
372  see the description above). */
373  rtx base, step;
374 
375  /* The type of extend applied to it (IV_SIGN_EXTEND, IV_ZERO_EXTEND,
376  or IV_UNKNOWN_EXTEND). */
377  enum iv_extend_code extend;
378 
379  /* Operations applied in the extended mode. */
380  rtx delta, mult;
381 
382  /* The mode it is extended to. */
383  enum machine_mode extend_mode;
384 
385  /* The mode the variable iterates in. */
386  enum machine_mode mode;
387 
388  /* Whether the first iteration needs to be handled specially. */
389  unsigned first_special : 1;
390 };
391 
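/* Worked example (editorial): a QImode counter that starts at 0 and is
   incremented by 1 each iteration, used zero-extended to SImode, would be
   described by mode == QImode, extend_mode == SImode,
   extend == IV_ZERO_EXTEND, base == const0_rtx, step == const1_rtx,
   delta == const0_rtx, mult == const1_rtx and first_special == 0; by the
   formula above, its value in the i-th iteration is
   0 + 1 * zero_extend:SI (subreg:QI (0 + i * 1)).  */
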
392 /* The description of an exit from the loop and of the number of iterations
393  till we take the exit. */
394 
395 struct GTY(()) niter_desc
396 {
397  /* The edge out of the loop. */
398  edge out_edge;
399 
400  /* The other edge leading from the condition. */
401  edge in_edge;
402 
403  /* True if we are able to say anything about the number of iterations of the
404  loop. */
405  bool simple_p;
406 
407  /* True if the loop iterates a constant number of times. */
408  bool const_iter;
409 
410  /* Number of iterations if constant. */
411  unsigned HOST_WIDEST_INT niter;
412 
413  /* Assumptions under which the rest of the information is valid. */
414  rtx assumptions;
415 
416  /* Assumptions under which the loop ends before reaching the latch,
417  even if the value of niter_expr says otherwise. */
418  rtx noloop_assumptions;
419 
420  /* Condition under which the loop is infinite. */
421  rtx infinite;
422 
423  /* Whether the comparison is signed. */
424  bool signed_p;
425 
426  /* The mode in which niter_expr should be computed. */
427  enum machine_mode mode;
428 
429  /* The number of iterations of the loop. */
430  rtx niter_expr;
431 };
432 
433 extern void iv_analysis_loop_init (struct loop *);
434 extern bool iv_analyze (rtx, rtx, struct rtx_iv *);
435 extern bool iv_analyze_result (rtx, rtx, struct rtx_iv *);
436 extern bool iv_analyze_expr (rtx, rtx, enum machine_mode, struct rtx_iv *);
437 extern rtx get_iv_value (struct rtx_iv *, rtx);
438 extern bool biv_p (rtx, rtx);
439 extern void find_simple_exit (struct loop *, struct niter_desc *);
440 extern void iv_analysis_done (void);
441 
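/* Illustrative sketch (editorial; the helper name is made up): analyze REG
   as an induction variable at INSN inside LOOP and, when the analysis
   succeeds, build the expression for its value in the first iteration.
   The argument order of iv_analyze (insn first, then the analyzed rtx)
   follows common usage elsewhere in GCC; NULL_RTX and const0_rtx are
   assumed to be available from rtl.h.  */

static inline rtx
example_iv_initial_value (struct loop *loop, rtx insn, rtx reg)
{
  struct rtx_iv iv;
  rtx val = NULL_RTX;

  iv_analysis_loop_init (loop);             /* Set up IV analysis for LOOP.  */
  if (iv_analyze (insn, reg, &iv))
    val = get_iv_value (&iv, const0_rtx);   /* Value for iteration 0.  */
  iv_analysis_done ();                      /* Release the analysis data.  */

  return val;
}
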
442 extern struct niter_desc *get_simple_loop_desc (struct loop *loop);
443 extern void free_simple_loop_desc (struct loop *loop);
444 
445 static inline struct niter_desc *
446 simple_loop_desc (struct loop *loop)
447 {
448  return loop->simple_loop_desc;
449 }
450 
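/* Illustrative sketch (editorial; the helper name is made up): use the
   cached RTL analysis to test whether LOOP is known to iterate a
   compile-time constant number of times, returning the count in *COUNT.  */

static inline bool
example_constant_rtl_niter_p (struct loop *loop, unsigned HOST_WIDEST_INT *count)
{
  struct niter_desc *desc = get_simple_loop_desc (loop);

  if (!desc->simple_p || !desc->const_iter)
    return false;      /* Nothing known, or the count is not a constant.  */

  *count = desc->niter;
  return true;
}
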
451 /* Accessors for the loop structures. */
452 
453 /* Returns the loop with index NUM from FN's loop tree. */
454 
455 static inline struct loop *
456 get_loop (struct function *fn, unsigned num)
457 {
458  return (*loops_for_fn (fn)->larray)[num];
459 }
460 
461 /* Returns the number of superloops of LOOP. */
462 
463 static inline unsigned
464 loop_depth (const struct loop *loop)
465 {
466  return vec_safe_length (loop->superloops);
467 }
468 
469 /* Returns the loop depth of the loop BB belongs to. */
470 
471 static inline int
472 bb_loop_depth (const_basic_block bb)
473 {
474  return bb->loop_father ? loop_depth (bb->loop_father) : 0;
475 }
476 
477 /* Returns the immediate superloop of LOOP, or NULL if LOOP is the outermost
478  loop. */
479 
480 static inline struct loop *
481 loop_outer (const struct loop *loop)
482 {
483  unsigned n = vec_safe_length (loop->superloops);
484 
485  if (n == 0)
486  return NULL;
487 
488  return (*loop->superloops)[n - 1];
489 }
490 
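/* Illustrative sketch (editorial; the helper name is made up): walk from
   LOOP up to the root of the loop tree using loop_outer, printing the index
   and depth of each enclosing loop.  The fake root loop (depth 0) is the
   last one visited.  */

static inline void
example_print_superloops (FILE *file, const struct loop *loop)
{
  const struct loop *l;

  for (l = loop; l; l = loop_outer (l))
    fprintf (file, "loop %d at depth %u\n", l->num, loop_depth (l));
}
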
491 /* Returns true if LOOP has at least one exit edge. */
492 
493 static inline bool
494 loop_has_exit_edges (const struct loop *loop)
495 {
496  return loop->exits->next->e != NULL;
497 }
498 
499 /* Returns the list of loops in FN. */
500 
501 inline vec<loop_p, va_gc> *
502 get_loops (struct function *fn)
503 {
504  struct loops *loops = loops_for_fn (fn);
505  if (!loops)
506  return NULL;
507 
508  return loops->larray;
509 }
510 
511 /* Returns the number of loops in FN (including the removed
512  ones and the fake loop that forms the root of the loop tree). */
513 
514 static inline unsigned
515 number_of_loops (struct function *fn)
516 {
517  struct loops *loops = loops_for_fn (fn);
518  if (!loops)
519  return 0;
520 
521  return vec_safe_length (loops->larray);
522 }
523 
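/* Illustrative sketch (editorial; the helper name is made up): because the
   count above includes the fake root loop, "number_of_loops (fn) <= 1"
   means the function has no natural loops, which passes commonly use as an
   early-out.  */

static inline bool
example_function_may_have_loops_p (struct function *fn)
{
  return number_of_loops (fn) > 1;
}
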
524 /* Returns true if the state of the loops satisfies all properties
525  described by FLAGS. */
526 
527 static inline bool
528 loops_state_satisfies_p (unsigned flags)
529 {
530  return (current_loops->state & flags) == flags;
531 }
532 
533 /* Adds FLAGS to the loops state. */
534 
535 static inline void
536 loops_state_set (unsigned flags)
537 {
538  current_loops->state |= flags;
539 }
540 
541 /* Clears FLAGS from the loops state. */
542 
543 static inline void
544 loops_state_clear (unsigned flags)
545 {
546  if (!current_loops)
547  return;
548  current_loops->state &= ~flags;
549 }
550 
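/* Illustrative sketch (editorial; the helper name is made up): a pass that
   relies on the LOOPS_NORMAL properties plus recorded exits can verify them
   with loops_state_satisfies_p.  current_loops must be non-NULL here, since
   loops_state_satisfies_p dereferences it.  */

static inline bool
example_loops_in_normal_form_p (void)
{
  return loops_state_satisfies_p (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
}
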
551 /* Loop iterators. */
552 
553 /* Flags for loop iteration. */
554 
555 enum li_flags
556 {
557  LI_INCLUDE_ROOT = 1, /* Include the fake root of the loop tree. */
558  LI_FROM_INNERMOST = 2, /* Iterate over the loops in the reverse order,
559  starting from innermost ones. */
560  LI_ONLY_INNERMOST = 4 /* Iterate only over innermost loops. */
561 };
562 
563 /* The iterator for loops. */
564 
565 typedef struct
566 {
567  /* The list of loops to visit. */
568  vec<int> to_visit;
569 
570  /* The index of the current loop. */
571  unsigned idx;
572 } loop_iterator;
573 
574 static inline void
575 fel_next (loop_iterator *li, loop_p *loop)
576 {
577  int anum;
578 
579  while (li->to_visit.iterate (li->idx, &anum))
580  {
581  li->idx++;
582  *loop = get_loop (cfun, anum);
583  if (*loop)
584  return;
585  }
586 
587  li->to_visit.release ();
588  *loop = NULL;
589 }
590 
591 static inline void
592 fel_init (loop_iterator *li, loop_p *loop, unsigned flags)
593 {
594  struct loop *aloop;
595  unsigned i;
596  int mn;
597 
598  li->idx = 0;
599  if (!current_loops)
600  {
601  li->to_visit.create (0);
602  *loop = NULL;
603  return;
604  }
605 
606  li->to_visit.create (number_of_loops (cfun));
607  mn = (flags & LI_INCLUDE_ROOT) ? 0 : 1;
608 
609  if (flags & LI_ONLY_INNERMOST)
610  {
611  for (i = 0; vec_safe_iterate (current_loops->larray, i, &aloop); i++)
612  if (aloop != NULL
613  && aloop->inner == NULL
614  && aloop->num >= mn)
615  li->to_visit.quick_push (aloop->num);
616  }
617  else if (flags & LI_FROM_INNERMOST)
618  {
619  /* Push the loops to LI->TO_VISIT in postorder. */
620  for (aloop = current_loops->tree_root;
621  aloop->inner != NULL;
622  aloop = aloop->inner)
623  continue;
624 
625  while (1)
626  {
627  if (aloop->num >= mn)
628  li->to_visit.quick_push (aloop->num);
629 
630  if (aloop->next)
631  {
632  for (aloop = aloop->next;
633  aloop->inner != NULL;
634  aloop = aloop->inner)
635  continue;
636  }
637  else if (!loop_outer (aloop))
638  break;
639  else
640  aloop = loop_outer (aloop);
641  }
642  }
643  else
644  {
645  /* Push the loops to LI->TO_VISIT in preorder. */
646  aloop = current_loops->tree_root;
647  while (1)
648  {
649  if (aloop->num >= mn)
650  li->to_visit.quick_push (aloop->num);
651 
652  if (aloop->inner != NULL)
653  aloop = aloop->inner;
654  else
655  {
656  while (aloop != NULL && aloop->next == NULL)
657  aloop = loop_outer (aloop);
658  if (aloop == NULL)
659  break;
660  aloop = aloop->next;
661  }
662  }
663  }
664 
665  fel_next (li, loop);
666 }
667 
668 #define FOR_EACH_LOOP(LI, LOOP, FLAGS) \
669  for (fel_init (&(LI), &(LOOP), FLAGS); \
670  (LOOP); \
671  fel_next (&(LI), &(LOOP)))
672 
673 #define FOR_EACH_LOOP_BREAK(LI) \
674  { \
675  (LI).to_visit.release (); \
676  break; \
677  }
678 
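/* Illustrative sketch (editorial; the helper name is made up): dump every
   innermost loop of the current function.  Assumes a context where cfun and
   current_loops are set up (e.g. inside a pass bracketed by
   loop_optimizer_init / loop_optimizer_finalize).  */

static inline void
example_dump_innermost_loops (FILE *file)
{
  loop_iterator li;
  struct loop *loop;

  FOR_EACH_LOOP (li, loop, LI_ONLY_INNERMOST)
    flow_loop_dump (loop, file, NULL, 0);
}
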
679 /* The properties of the target. */
680 struct target_cfgloop {
681  /* Number of available registers. */
682  unsigned x_target_avail_regs;
683 
684  /* Number of available registers that are call-clobbered. */
685  unsigned x_target_clobbered_regs;
686 
687  /* Number of registers reserved for temporary expressions. */
688  unsigned x_target_res_regs;
689 
690  /* The cost of a register when there is still some reserve, but we are
691  approaching the number of available registers. */
692  unsigned x_target_reg_cost[2];
693 
694  /* The cost of a register when we need to spill. */
695  unsigned x_target_spill_cost[2];
696 };
697 
698 extern struct target_cfgloop default_target_cfgloop;
699 #if SWITCHABLE_TARGET
700 extern struct target_cfgloop *this_target_cfgloop;
701 #else
702 #define this_target_cfgloop (&default_target_cfgloop)
703 #endif
704 
705 #define target_avail_regs \
706  (this_target_cfgloop->x_target_avail_regs)
707 #define target_clobbered_regs \
708  (this_target_cfgloop->x_target_clobbered_regs)
709 #define target_res_regs \
710  (this_target_cfgloop->x_target_res_regs)
711 #define target_reg_cost \
712  (this_target_cfgloop->x_target_reg_cost)
713 #define target_spill_cost \
714  (this_target_cfgloop->x_target_spill_cost)
715 
716 /* Register pressure estimation for induction variable optimizations & loop
717  invariant motion. */
718 extern unsigned estimate_reg_pressure_cost (unsigned, unsigned, bool, bool);
719 extern void init_set_costs (void);
720 
721 /* Loop optimizer initialization. */
722 extern void loop_optimizer_init (unsigned);
723 extern void loop_optimizer_finalize (void);
724 
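/* Illustrative sketch (editorial; the helper name is made up): typical
   bracketing of a loop pass.  LOOPS_NORMAL requests preheaders, simple
   latches and marked irreducible regions; LOOPS_HAVE_RECORDED_EXITS
   additionally keeps the exit lists up to date.  */

static inline void
example_with_loop_structure (void (*body_fn) (void))
{
  loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
  body_fn ();                   /* The actual work on the loop tree.  */
  loop_optimizer_finalize ();
}
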
725 /* Optimization passes. */
726 extern void unswitch_loops (void);
727 
728 enum
729 {
730  UAP_PEEL = 1, /* Enables loop peeling. */
731  UAP_UNROLL = 2, /* Enables unrolling of loops if it seems profitable. */
732  UAP_UNROLL_ALL = 4 /* Enables unrolling of all loops. */
733 };
734 
735 extern void unroll_and_peel_loops (int);
736 extern void doloop_optimize_loops (void);
737 extern void move_loop_invariants (void);
738 extern bool finite_loop_p (struct loop *);
739 extern void scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound);
740 extern vec<basic_block> get_loop_hot_path (const struct loop *loop);
741 
742 /* Returns the outermost loop of the loop nest that contains LOOP. */
743 static inline struct loop *
744 loop_outermost (struct loop *loop)
745 {
746  unsigned n = vec_safe_length (loop->superloops);
747 
748  if (n <= 1)
749  return loop;
750 
751  return (*loop->superloops)[1];
752 }
753 
754 
755 #endif /* GCC_CFGLOOP_H */