GCC Middle and Back End API Reference
ira-build.c File Reference

Macros

#define BB_TO_VISIT   BB_VISITED

Functions

static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx, ira_loop_tree_node_t)
static void init_loop_tree_node ()
static void create_loop_tree_nodes ()
static bool more_one_region_p ()
static void finish_loop_tree_node ()
static void finish_loop_tree_nodes ()
static void add_loop_to_tree ()
static int setup_loop_tree_level ()
static void form_loop_tree ()
static void rebuild_regno_allocno_maps ()
static void initiate_allocnos ()
static ira_object_t ira_create_object ()
ira_allocno_t ira_create_allocno (int regno, bool cap_p, ira_loop_tree_node_t loop_tree_node)
void ira_set_allocno_class ()
void ira_create_allocno_objects ()
static void create_allocno_objects ()
static void merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to, bool total_only)
void ior_hard_reg_conflicts ()
bool ira_conflict_vector_profitable_p ()
void ira_allocate_conflict_vec ()
static void allocate_conflict_bit_vec ()
void ira_allocate_object_conflicts ()
static void add_to_conflicts ()
static void ira_add_conflict ()
static void clear_conflicts ()
static void compress_conflict_vec ()
static void compress_conflict_vecs ()
void ira_print_expanded_allocno ()
static ira_allocno_t create_cap_allocno ()
live_range_t ira_create_live_range (ira_object_t obj, int start, int finish, live_range_t next)
void ira_add_live_range_to_object ()
static live_range_t copy_live_range ()
live_range_t ira_copy_live_range_list ()
live_range_t ira_merge_live_ranges ()
bool ira_live_ranges_intersect_p ()
void ira_finish_live_range ()
void ira_finish_live_range_list ()
void ira_free_allocno_updated_costs ()
static void ira_free_allocno_costs ()
static void finish_allocno ()
static void finish_allocnos ()
static void initiate_prefs ()
static ira_pref_t find_allocno_pref ()
ira_pref_t ira_create_pref ()
static void add_allocno_pref_to_list ()
void ira_add_allocno_pref ()
static void print_pref ()
void ira_debug_pref ()
static void print_prefs ()
void ira_debug_prefs ()
static void print_allocno_prefs ()
void ira_debug_allocno_prefs ()
static void finish_pref ()
void ira_remove_pref ()
void ira_remove_allocno_prefs ()
static void finish_prefs ()
static void initiate_copies ()
ira_copy_t ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq, bool constraint_p, rtx insn, ira_loop_tree_node_t loop_tree_node)
static void add_allocno_copy_to_list ()
static void swap_allocno_copy_ends_if_necessary ()
ira_copy_t ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq, bool constraint_p, rtx insn, ira_loop_tree_node_t loop_tree_node)
static void print_copy ()
DEBUG_FUNCTION void debug ()
void ira_debug_copy ()
static void print_copies ()
void ira_debug_copies ()
static void print_allocno_copies ()
void ira_debug_allocno_copies ()
static void finish_copy ()
static void finish_copies ()
static void initiate_cost_vectors ()
int * ira_allocate_cost_vector ()
void ira_free_cost_vector ()
static void finish_cost_vectors ()
static vec< ira_loop_tree_node_t > ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node, vec< ira_loop_tree_node_t > loop_preorder)
void ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node, void(*preorder_func)(ira_loop_tree_node_t), void(*postorder_func)(ira_loop_tree_node_t))
static void create_insn_allocnos ()
static void create_bb_allocnos ()
static void create_loop_allocnos ()
static void create_loop_tree_node_allocnos ()
static void propagate_modified_regnos ()
static void propagate_allocno_info ()
static void create_allocnos ()
static void change_object_in_range_list ()
static void move_allocno_live_ranges ()
static void copy_allocno_live_ranges ()
static bool low_pressure_loop_node_p ()
static int loop_compare_func ()
static void mark_loops_for_removal ()
static void mark_all_loops_for_removal ()
static void remove_uneccesary_loop_nodes_from_loop_tree ()
static bool loop_is_inside_p ()
static int regno_allocno_order_compare_func ()
static void ira_rebuild_regno_allocno_list ()
static void propagate_some_info_from_allocno ()
static void remove_unnecessary_allocnos ()
static void remove_low_level_allocnos ()
static void remove_unnecessary_regions ()
static void update_bad_spill_attribute ()
static void setup_min_max_allocno_live_range_point ()
static int object_range_compare_func ()
static void sort_conflict_id_map ()
static void setup_min_max_conflict_allocno_ids ()
static void create_caps ()
ira_allocno_t ira_parent_allocno ()
ira_allocno_t ira_parent_or_cap_allocno ()
static bool copy_info_to_removed_store_destinations ()
void ira_flattening ()
static void update_conflict_hard_reg_costs ()
bool ira_build ()
void ira_destroy ()

Variables

ira_loop_tree_node_t ira_loop_tree_root
int ira_loop_tree_height
ira_loop_tree_node_t ira_bb_nodes
ira_loop_tree_node_t ira_loop_nodes
unsigned int ira_loop_nodes_count
ira_allocno_t * ira_regno_allocno_map
ira_allocno_t * ira_allocnos
int ira_allocnos_num
int ira_objects_num
ira_object_t * ira_object_id_map
ira_pref_t * ira_prefs
int ira_prefs_num
ira_copy_t * ira_copies
int ira_copies_num
static int last_basic_block_before_change
static alloc_pool allocno_pool
static alloc_pool live_range_pool
static alloc_pool object_pool
static vec< ira_allocno_t > allocno_vec
static vec< ira_object_t > ira_object_id_map_vec
static int * conflict_check
static int curr_conflict_check_tick
static alloc_pool pref_pool
static vec< ira_pref_t > pref_vec
static alloc_pool copy_pool
static vec< ira_copy_t > copy_vec
static alloc_pool cost_vector_pool [N_REG_CLASSES]
ira_loop_tree_node_t ira_curr_loop_tree_node
ira_allocno_t * ira_curr_regno_allocno_map
static basic_block curr_bb
static vec< ira_loop_tree_node_t > removed_loop_vec
static vec< ira_loop_tree_node_t > children_vec
static ira_allocno_t * regno_allocnos
static ira_allocno_t * regno_top_level_allocno_map

Macro Definition Documentation

#define BB_TO_VISIT   BB_VISITED

Function Documentation

static void add_allocno_copy_to_list ( )
static
   Attach a copy CP to the allocnos involved in the copy.  
static void add_allocno_pref_to_list ( )
static
   Attach a pref PREF to the corresponding allocno.  

References ira_allocno_pref::num, and pool_free().

Referenced by finish_allocnos().

static void add_loop_to_tree ( )
static
   The following recursive function adds LOOP to the loop tree
   hierarchy.  LOOP is added only once.  If LOOP is NULL, we add a
   loop designating the whole function when CFG loops are not
   built.  
     We can not use loop node access macros here because of potential
     checking and because the nodes are not initialized enough
     yet.  
         We have not added loop node to the tree yet.  

References loop_outer(), loop::num, and ira_loop_tree_node::regno_allocno_map.

static void add_to_conflicts ( )
static
   Add OBJ2 to the conflicts of OBJ1.  
             Expand head of the bit vector.  
                 Expand tail of the bit vector.  
static void allocate_conflict_bit_vec ( )
static
   Allocate and initialize the conflict bit vector of OBJ.  
static void change_object_in_range_list ( )
static
   The page contains functions to remove some regions from a separate
   register allocation.  We remove regions whose separate allocation
   will hardly improve the result.  As a result we speed up regional
   register allocation.  
   The function changes the object in the range list given by R to OBJ.  
static void clear_conflicts ( )
static
   Clear all conflicts of OBJ.  

Referenced by copy_info_to_removed_store_destinations().

static void compress_conflict_vec ( )
static
   Remove duplications in conflict vector of OBJ.  
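
   A hedged, self-contained sketch of this duplicate-removal scheme: it
   works on a plain vector of integer ids with its own check array and
   tick, standing in for the real object vectors and for the
   conflict_check / curr_conflict_check_tick variables documented below.

     /* CHECK_ARRAY must be allocated to cover every possible id and be
        zero-initialized once; bumping CHECK_TICK invalidates all previous
        marks without clearing the array.  */
     static int *check_array;
     static int check_tick;

     /* Remove duplicate ids from VEC[0..N-1] in place and return the
        new length.  */
     static int
     compress_id_vec (int *vec, int n)
     {
       int i, len;

       check_tick++;
       for (i = len = 0; i < n; i++)
         if (check_array[vec[i]] != check_tick)
           {
             check_array[vec[i]] = check_tick;
             vec[len++] = vec[i];
           }
       return len;
     }
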
static void compress_conflict_vecs ( )
static
   Remove duplications in conflict vectors of all allocnos.  
static void copy_allocno_live_ranges ( )
static
static bool copy_info_to_removed_store_destinations ( )
static
   Process all allocnos originated from pseudo REGNO and copy live
   ranges, hard reg conflicts, and allocno stack reg attributes from
   low level allocnos to final allocnos which are destinations of
   removed stores at a loop exit.  Return true if we copied live
   ranges.  
           This allocno will be removed.  
         Caps will be removed.  

References clear_conflicts(), live_range::next, and live_range::object.

static live_range_t copy_live_range ( )
static
   Copy allocno live range R and return the result.  
static void create_allocno_objects ( )
static
   For each allocno, set ALLOCNO_NUM_OBJECTS and create the
   ALLOCNO_OBJECT structures.  This must be called after the allocno
   classes are known.  
static void create_allocnos ( )
static
   Create allocnos corresponding to pseudo-registers in the current
   function.  Traverse the loop tree for this.  
     We need to process BB first to correctly link allocnos by member
     next_regno_allocno.  
static void create_bb_allocnos ( )
static
   Create allocnos corresponding to pseudo-registers living in the
   basic block represented by the corresponding loop tree node
   BB_NODE.  
     It might be an allocno living through from one subloop to
     another.  
static ira_allocno_t create_cap_allocno ( )
static
   Create and return the cap representing allocno A in the
   parent loop.  

References ira_create_live_range().

static void create_caps ( )
static
static void create_insn_allocnos ( )
static
   This recursive function creates allocnos corresponding to
   pseudo-registers contained in X.  True OUTPUT_P means that X is
   an lvalue.  
static void create_loop_allocnos ( )
static
   Create allocnos corresponding to pseudo-registers living on edge E
   (a loop entry or exit).  Also mark the allocnos as living on the
   loop border. 
               The order of creations is important for right
               ira_regno_allocno_map.  

References create_loop_tree_node_allocnos(), ira_traverse_loop_tree(), and propagate_modified_regnos().

static void create_loop_tree_node_allocnos ( )
static
   Create allocnos corresponding to pseudo-registers living in loop
   represented by the corresponding loop tree node LOOP_NODE.  This
   function is called by ira_traverse_loop_tree.  

Referenced by create_loop_allocnos().

static void create_loop_tree_nodes ( )
static
   The following function allocates the loop tree nodes.  If
   CURRENT_LOOPS is NULL, the nodes corresponding to the loops (except
   the root, which corresponds to the whole function) will not be
   allocated, but nodes will still be allocated for basic blocks.  
DEBUG_FUNCTION void debug ( )
static ira_copy_t find_allocno_copy ( ira_allocno_t  a1,
ira_allocno_t  a2,
rtx  insn,
ira_loop_tree_node_t  loop_tree_node 
)
static

Building internal representation for IRA. Copyright (C) 2006-2013 Free Software Foundation, Inc. Contributed by Vladimir Makarov vmakarov@redhat.com.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see http://www.gnu.org/licenses/.

   Return the copy connecting A1 and A2 and originating from INSN of
   LOOP_TREE_NODE, if any.  
static ira_pref_t find_allocno_pref ( )
static
   Return pref for A and HARD_REGNO if any.  

References print_prefs().

Referenced by finish_allocnos().

static void finish_allocno ( )
static
   Free the memory allocated for allocno A.  

References ira_allocno_pref::allocno, ira_allocno_pref::next_pref, and pref.

Referenced by ira_finish_live_range_list().

static void finish_allocnos ( )
static
   Free the memory allocated for all allocnos.  

References add_allocno_pref_to_list(), find_allocno_pref(), ira_allocno_pref::freq, ira_create_pref(), and pref.

static void finish_copies ( )
static
   Free memory allocated for all copies.  

References ira_loop_tree_node::bb, BB_TO_VISIT, basic_block_def::flags, and basic_block_def::preds.

static void finish_copy ( )
static
   The function frees memory allocated for copy CP.  

References ira_loop_tree_node::bb, BB_TO_VISIT, and basic_block_def::flags.

Referenced by debug().

static void finish_cost_vectors ( )
static
   Finish work with hard register cost vectors.  Release allocation
   pool for each allocno class.  

References ira_loop_tree_node::bb, ira_loop_tree_node::regno_allocno_map, and vNULL.

static void finish_loop_tree_node ( )
static
   Free the loop tree node of a loop.  

Referenced by remove_unnecessary_allocnos().

static void finish_loop_tree_nodes ( )
static
   Free the loop tree nodes.  
static void finish_pref ( )
static
   The function frees memory allocated for PREF.  

Referenced by print_pref().

static void finish_prefs ( )
static
   Free memory allocated for all prefs.  
static void form_loop_tree ( )
static
   Create the loop tree.  The algorithm is designed to provide correct
   order of loops (they are ordered by their last loop BB) and basic
   blocks in the chain formed by member next.  
     We can not use loop/bb node access macros because of potential
     checking and because the nodes are not initialized enough
     yet.  
static void init_loop_tree_node ( )
static
   Initialize some members in loop tree node NODE.  Use LOOP_NUM for
   the member loop_num.  

References ira_allocate(), last_basic_block_before_change, memset(), ira_loop_tree_node::reg_pressure, and ira_loop_tree_node::regno_allocno_map.

static void initiate_allocnos ( )
static
   Initialize data concerning allocnos.  
static void initiate_copies ( )
static
   The function initializes data concerning allocno copies.  
static void initiate_cost_vectors ( )
static
   The function initiates work with hard register cost vectors.  It
   creates allocation pool for each allocno class.  

References BB_TO_VISIT.

static void initiate_prefs ( )
static
   The function initializes data concerning allocno prefs.  

References print_pref().

void ior_hard_reg_conflicts ( )
   Update hard register conflict information for all objects associated with
   A to include the regs in SET.  
ira_copy_t ira_add_allocno_copy ( ira_allocno_t  first,
ira_allocno_t  second,
int  freq,
bool  constraint_p,
rtx  insn,
ira_loop_tree_node_t  loop_tree_node 
)
   Create (or update frequency if the copy already exists) and return
   the copy of allocnos FIRST and SECOND with frequency FREQ
   corresponding to move insn INSN (if any) and originated from
   LOOP_TREE_NODE.  

References print_allocno_copies().

Referenced by add_copies().
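
   The create-or-update behaviour can be pictured with a minimal sketch;
   the copy_rec type, the flat list, and the plain malloc below are
   hypothetical stand-ins, and the real code additionally canonicalizes
   the copy ends and links the copy into per-allocno lists.

     #include <stdlib.h>

     struct copy_rec { int a1, a2, freq; struct copy_rec *next; };

     /* Return the copy of (A1, A2) in *LIST, accumulating FREQ into an
        existing copy if one is found in either orientation, otherwise
        creating a new one at the head of the list.  */
     static struct copy_rec *
     add_copy (struct copy_rec **list, int a1, int a2, int freq)
     {
       struct copy_rec *cp;

       for (cp = *list; cp != NULL; cp = cp->next)
         if ((cp->a1 == a1 && cp->a2 == a2)
             || (cp->a1 == a2 && cp->a2 == a1))
           {
             cp->freq += freq;
             return cp;
           }
       cp = (struct copy_rec *) malloc (sizeof (*cp));
       cp->a1 = a1;
       cp->a2 = a2;
       cp->freq = freq;
       cp->next = *list;
       *list = cp;
       return cp;
     }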

void ira_add_allocno_pref ( )
   Create (or update frequency if the pref already exists) the pref of
   allocno A preferring HARD_REGNO with frequency FREQ.  
static void ira_add_conflict ( )
static
   Add OBJ1 to the conflicts of OBJ2 and vice versa.  
void ira_add_live_range_to_object ( )
   Create a new live range for OBJECT and queue it at the head of its
   live range list.  

References live_range::finish, ira_finish_live_range(), live_range::next, and live_range::start.

void ira_allocate_conflict_vec ( )
   Allocate and initialize the conflict vector of OBJ for NUM
   conflicting objects.  

References ira_allocate(), ira_free(), memcpy(), and loop::num.

int* ira_allocate_cost_vector ( )
   Allocate and return a cost vector VEC for ACLASS.  

Referenced by ira_copy_iter_cond().

void ira_allocate_object_conflicts ( )
   Allocate and initialize the conflict vector or conflict bit vector
   of OBJ for NUM conflicting allocnos, whichever is more profitable.  

References memset().

bool ira_build ( void  )
   Create an internal representation (IR) for IRA (allocnos, copies,
   loop tree nodes).  The function returns TRUE if we generate a loop
   structure (besides nodes representing the whole function and the
   basic blocks) for regional allocation.  A true return means that we
   really need to flatten the IR before reload.  
         Remove all regions but root one.  
         We don't save hard registers around calls for fast allocation
         -- add caller clobbered registers as conflicting ones to
         allocno crossing calls.  

Referenced by split_live_ranges_for_shrink_wrap().

bool ira_conflict_vector_profitable_p ( )
   Return TRUE if a conflict vector with NUM elements is more
   profitable than a conflict bit vector for OBJ.  
       We prefer a bit vector in such a case because it does not result
       in allocation.  
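
   A rough sketch of the space trade-off behind this decision; the
   helper below is hypothetical and only compares a pointer vector
   against a bit vector spanning a conflict id range, it is not the
   exact check performed here.

     #include <stdbool.h>
     #include <stddef.h>

     /* A conflict vector costs NUM pointer slots; a conflict bit vector
        must cover the whole id range [MIN_ID, MAX_ID] at one bit per id.
        Prefer the vector when it is the smaller of the two.  */
     static bool
     conflict_vector_cheaper_p (int num, int min_id, int max_id)
     {
       size_t vec_bytes = (size_t) num * sizeof (void *);
       size_t bit_bytes = ((size_t) (max_id - min_id) + 8) / 8;

       return vec_bytes < bit_bytes;
     }
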
live_range_t ira_copy_live_range_list ( )
   Copy allocno live range list given by its head R and return the
   result.  
ira_allocno_t ira_create_allocno ( int  regno,
bool  cap_p,
ira_loop_tree_node_t  loop_tree_node 
)
   Create and return the allocno corresponding to REGNO in
   LOOP_TREE_NODE.  Add the allocno to the list of allocnos with the
   same regno if CAP_P is FALSE.  
           Remember that we can create temporary allocnos to break
           cycles in register shuffle on region borders (see
           ira-emit.c).  
void ira_create_allocno_objects ( )
   Determine the number of objects we should associate with allocno A
   and allocate them.  

Referenced by ira_set_allocno_class(), and modify_move_list().

ira_copy_t ira_create_copy ( ira_allocno_t  first,
ira_allocno_t  second,
int  freq,
bool  constraint_p,
rtx  insn,
ira_loop_tree_node_t  loop_tree_node 
)
   Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
   SECOND, FREQ, CONSTRAINT_P, and INSN.  

References ira_allocno_copy::constraint_p, ira_allocno_copy::first, ira_allocno_copy::freq, ira_allocno_copy::insn, ira_allocno_copy::num, and ira_allocno_copy::second.

live_range_t ira_create_live_range ( ira_object_t  obj,
int  start,
int  finish,
live_range_t  next 
)
   Create and return a live range for OBJECT with given attributes.  

References first, last, and live_range::start.

Referenced by create_cap_allocno().

static ira_object_t ira_create_object ( )
static
   Create and return an object corresponding to a new allocno A.  

References ira_loop_tree_node::all_allocnos, bitmap_set_bit(), ira_allocnos_num, pool_alloc(), and ira_loop_tree_node::regno_allocno_map.

ira_pref_t ira_create_pref ( )
   Create and return pref with given attributes A, HARD_REGNO, and FREQ.  

References ira_allocno_pref::freq, ira_allocno_pref::hard_regno, ira_allocno_pref::next_pref, ira_allocno_pref::num, and pref.

Referenced by finish_allocnos().

void ira_debug_allocno_copies ( )
   Print info about copies involving allocno A into stderr.  
void ira_debug_allocno_prefs ( )
   Print info about prefs involving allocno A into stderr.  

References ira_allocno_copy::first.

void ira_debug_copies ( void  )
   Print info about all copies into stderr.  
void ira_debug_copy ( )
   Print info about copy CP into stderr.  
void ira_debug_pref ( )
   Print info about PREF into stderr.  
void ira_debug_prefs ( void  )
   Print info about all prefs into stderr.  
void ira_destroy ( void  )
   Release the data created by function ira_build.  
void ira_finish_live_range ( )
   Free allocno live range R.  

References ira_free_allocno_costs(), and pool_free().

Referenced by ira_add_live_range_to_object().

void ira_finish_live_range_list ( )
   Free list of allocno live ranges starting with R.  

References finish_allocno(), free_alloc_pool(), and ira_free().

void ira_flattening ( )
   Flatten the IR.  In other words, this function transforms IR as if
   it were built with one region (without loops).  We could make it
   much simpler by rebuilding IR with one region, but unfortunately it
   takes a lot of time.  MAX_REGNO_BEFORE_EMIT and
   IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
   IRA_MAX_POINT before emitting insns on the loop borders.  
           Caps are not in the regno allocno maps and they will never
           be transformed into allocnos existing after IR
           flattening.  
     Fix final allocno attributes.  
         Rebuild conflicts.  
                         Don't set up conflict for the allocno with itself.  
     Mark some copies for removing and change allocnos in the rest
     copies.  
             Check that the copy was not propagated from level on
             which we will have different pseudos.  
     Remove unnecessary allocnos on lower levels of the loop tree.  
         Restore updated costs for assignments from reload.  
     Remove unnecessary copies.  

Referenced by move_unallocated_pseudos().

static void ira_free_allocno_costs ( )
static
   Free and nullify all cost vectors allocated earlier for allocno
   A.  

Referenced by ira_finish_live_range().

void ira_free_allocno_updated_costs ( )
   Free updated register costs of allocno A.  

References create_alloc_pool(), and get_max_uid().

void ira_free_cost_vector ( )
   Free a cost vector VEC for ACLASS.  
bool ira_live_ranges_intersect_p ( )
   Return TRUE if live ranges R1 and R2 intersect.  
     Remember the live ranges are always kept ordered.  

Referenced by coalesce_allocnos().
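
   A self-contained sketch of the ordered walk, using a stand-in range
   type rather than the real live_range structure; each list is assumed
   to keep its latest range first, since new ranges are queued at the
   head (see ira_add_live_range_to_object).

     struct range { int start, finish; struct range *next; };

     static int
     ranges_intersect_p (const struct range *r1, const struct range *r2)
     {
       while (r1 != NULL && r2 != NULL)
         {
           if (r1->start > r2->finish)
             /* r1's current range lies entirely after r2's; the remaining
                r2 ranges are even earlier, so advance r1.  */
             r1 = r1->next;
           else if (r2->start > r1->finish)
             r2 = r2->next;
           else
             return 1;            /* the two current ranges overlap */
         }
       return 0;
     }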

static vec<ira_loop_tree_node_t> ira_loop_tree_body_rev_postorder ( ira_loop_tree_node_t  loop_node,
vec< ira_loop_tree_node_t >  loop_preorder 
)
static
   Compute a post-ordering of the reverse control flow of the loop body
   designated by the children nodes of LOOP_NODE, whose body nodes in
   pre-order are input as LOOP_PREORDER.  Return a VEC with a post-order
   of the reverse loop body.

   For the post-order of the reverse CFG, we visit the basic blocks in
   LOOP_PREORDER array in the reverse order of where they appear.
   This is important: We do not just want to compute a post-order of
   the reverse CFG, we want to make a best-guess for a visiting order that
   minimizes the number of chain elements per allocno live range.  If the
   blocks would be visited in a different order, we would still compute a
   correct post-ordering but it would be less likely that two nodes
   connected by an edge in the CFG are neighbours in the topsort.  
         This is a bit of a strange abuse of the BB_VISITED flag:  We use
         the flag to mark blocks we still have to visit to add them to
         our post-order.  Define an alias to avoid confusion.  
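
   A conceptual sketch of such a reverse-CFG post-order, with a
   hypothetical simplified block type and an explicit to_visit flag in
   place of BB_TO_VISIT; the real function works on the loop tree nodes
   and their basic blocks.

     struct blk { int to_visit; int npreds; struct blk **preds; };

     /* Emit B after all of its still-to-visit predecessors, i.e. after
        its successors in the reverse CFG.  */
     static void
     rev_postorder_1 (struct blk *b, struct blk **out, int *n)
     {
       int i;

       b->to_visit = 0;
       for (i = 0; i < b->npreds; i++)
         if (b->preds[i]->to_visit)
           rev_postorder_1 (b->preds[i], out, n);
       out[(*n)++] = b;
     }

     /* Fill OUT with a post-order of the reverse CFG of the NBLOCKS
        blocks in PREORDER, starting the walks from the blocks in the
        reverse of their pre-order appearance.  Blocks outside PREORDER
        are never marked and therefore never visited.  */
     static int
     rev_postorder (struct blk **preorder, int nblocks, struct blk **out)
     {
       int i, n = 0;

       for (i = 0; i < nblocks; i++)
         preorder[i]->to_visit = 1;
       for (i = nblocks - 1; i >= 0; i--)
         if (preorder[i]->to_visit)
           rev_postorder_1 (preorder[i], out, &n);
       return n;
     }
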
live_range_t ira_merge_live_ranges ( )
   Merge ranges R1 and R2 and return the result.  The function
   maintains the order of ranges and tries to minimize the number of
   result ranges.  
             Intersected ranges: merge r1 and r2 into r1.  
                 To try to merge with subsequent ranges in r1.  
             Add r1 to the result.  
                 To try to merge with subsequent ranges in r2.  
ira_allocno_t ira_parent_allocno ( )
   Find the allocno that corresponds to A at a level one higher up in the
   loop tree.  Returns NULL if A is a cap, or if it has no parent.  
ira_allocno_t ira_parent_or_cap_allocno ( )
   Find the allocno that corresponds to A at a level one higher up in the
   loop tree.  If ALLOCNO_CAP is set for A, return that.  

Referenced by add_copies().

void ira_print_expanded_allocno ( )
   This recursive function outputs allocno A and if it is a cap the
   function outputs its members.  

Referenced by bucket_allocno_compare_func(), and ira_loop_edge_freq().

static void ira_rebuild_regno_allocno_list ( )
static
   Restore allocno order for REGNO in the regno allocno list.  
void ira_remove_allocno_prefs ( )
   Remove all prefs of allocno A.  
void ira_remove_pref ( )
   Remove PREF from the list of allocno prefs and free memory for
   it.  
void ira_set_allocno_class ( )
   Set up register class for A and update its conflict hard
   registers.  

References ira_create_allocno_objects().

Referenced by modify_move_list().

void ira_traverse_loop_tree ( bool  bb_p,
ira_loop_tree_node_t  loop_node,
void(*)(ira_loop_tree_node_t)  preorder_func,
void(*)(ira_loop_tree_node_t)  postorder_func 
)
   This recursive function traverses loop tree with root LOOP_NODE
   calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
   correspondingly in preorder and postorder.  The function sets up
   IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP.  If BB_P,
   basic block nodes of LOOP_NODE are also processed (before its
   subloop nodes).
   
   If BB_P is set and POSTORDER_FUNC is given, the basic blocks in
   the loop are passed in the *reverse* post-order of the *reverse*
   CFG.  This is only used by ira_create_allocno_live_ranges, which
   wants to visit basic blocks in this order to minimize the number
   of elements per live range chain.
   Note that the loop tree nodes are still visited in the normal,
   forward post-order of  the loop tree.  
         Add all nodes to the set of nodes to visit.  The IRA loop tree
         is set up such that nodes in the loop body appear in a pre-order
         of their place in the CFG.  

Referenced by create_loop_allocnos(), print_conflicts(), and process_bb_for_costs().
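
   Stripped of the IRA specifics (the current loop tree node and regno
   allocno map bookkeeping, the basic block handling, and the reverse
   post-order described above), the traversal itself reduces to a
   conventional pre/post-order walk; a minimal sketch with a
   hypothetical node type:

     struct tree_node { struct tree_node *children, *next; };

     static void
     traverse (struct tree_node *node,
               void (*pre) (struct tree_node *),
               void (*post) (struct tree_node *))
     {
       struct tree_node *child;

       if (pre != NULL)
         pre (node);                       /* preorder visit */
       for (child = node->children; child != NULL; child = child->next)
         traverse (child, pre, post);
       if (post != NULL)
         post (node);                      /* postorder visit */
     }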

static int loop_compare_func ( )
static
   Sort loops for marking them for removal.  We put already marked
   loops first, then less frequent loops next, and then outer loops
   next.  
     Make sorting stable.  
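
   A hypothetical comparator illustrating this ordering on a flattened
   stand-in node type; the real comparator works on ira_loop_tree_node_t
   values.

     struct loop_cand { int to_remove_p, freq, depth, num; };

     static int
     loop_compare (const void *v1p, const void *v2p)
     {
       const struct loop_cand *l1 = (const struct loop_cand *) v1p;
       const struct loop_cand *l2 = (const struct loop_cand *) v2p;
       int diff;

       if ((diff = l2->to_remove_p - l1->to_remove_p) != 0)
         return diff;                  /* already marked loops first */
       if ((diff = l1->freq - l2->freq) != 0)
         return diff;                  /* then less frequent loops */
       if ((diff = l1->depth - l2->depth) != 0)
         return diff;                  /* then outer (shallower) loops */
       /* Make sorting stable.  */
       return l1->num - l2->num;
     }
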
static bool loop_is_inside_p ( )
static
   Return TRUE if NODE is inside PARENT.  
static bool low_pressure_loop_node_p ( )
static
   Return TRUE if NODE represents a loop with low register
   pressure.  
static void mark_all_loops_for_removal ( )
static
   Mark all loops but root for removing.  
               Don't remove the root.  

Referenced by remove_unnecessary_allocnos().

static void mark_loops_for_removal ( )
static
   Mark loops which should be removed from regional allocation.  We
   remove a loop with low register pressure inside another loop with
   register pressure.  In this case a separate allocation of the loop
   hardly helps (for an irregular register file architecture it could
   help by choosing a better hard register in the loop, but we prefer
   faster allocation even in this case).  We also remove cheap loops
   if there are more than IRA_MAX_LOOPS_NUM of them.  Loops with EH
   exit or entry edges are removed too because the allocation might
   require putting pseudo moves on the EH edges (we could still do this
   for pseudos with caller-saved hard registers in some cases but it
   is impossible to say here or during the top-down allocation pass what
   hard register the pseudos will finally get).  
               Don't remove the root.  

Referenced by remove_unnecessary_allocnos().

static void merge_hard_reg_conflicts ( ira_allocno_t  from,
ira_allocno_t  to,
bool  total_only 
)
static
   Merge hard register conflict information for all objects associated with
   allocno FROM into the corresponding objects associated with TO.
   If TOTAL_ONLY is true, we only merge OBJECT_TOTAL_CONFLICT_HARD_REGS.  
static bool more_one_region_p ( )
static
   The function returns TRUE if there is more than one allocation
   region.  
static void move_allocno_live_ranges ( )
static
   Move all live ranges associated with allocno FROM to allocno TO.  

References cfun, get_loops(), ira_allocate(), number_of_loops(), ira_loop_tree_node::to_remove_p, and vec_safe_iterate().

static int object_range_compare_func ( )
static
   Sort allocnos according to their live ranges.  Allocnos with a
   smaller allocno class are put first unless we use priority
   coloring.  Allocnos with the same class are ordered according to
   their start (min).  Allocnos with the same start are ordered
   according to their finish (max).  

Referenced by update_bad_spill_attribute().

static void print_allocno_copies ( )
static
   Print info about copies involving allocno A into file F.  

Referenced by ira_add_allocno_copy().

static void print_allocno_prefs ( )
static
   Print info about prefs involving allocno A into file F.  

References create_alloc_pool(), get_max_uid(), and ira_copies_num.

static void print_copies ( )
static
   Print info about all copies into file F.  
static void print_copy ( )
static
   Print info about copy CP into file F.  

References copy_pool, and pool_free().

static void print_pref ( )
static
   Print info about PREF into file F.  

References finish_pref(), ira_allocno_pref::next_pref, and pref.

Referenced by initiate_prefs().

static void print_prefs ( )
static
   Print info about all prefs into file F.  

Referenced by find_allocno_pref().

static void propagate_allocno_info ( )
static
   Propagate new info about allocno A (see comments about accumulated
   info in allocno definition) to the corresponding allocno on upper
   loop tree level.  So allocnos on upper levels accumulate
   information about the corresponding allocnos in nested regions.
   The new info means allocno info finally calculated in this
   file.  
             There are no caps yet at this point.  So use
             border_allocnos to find allocnos for the propagation.  
static void propagate_modified_regnos ( )
static
   Propagate information about allocnos modified inside the loop given
   by its LOOP_TREE_NODE to its parent.  

Referenced by create_loop_allocnos().

static void propagate_some_info_from_allocno ( )
static
   Propagate info from allocno FROM_A to allocno A.  
static void rebuild_regno_allocno_maps ( )
static
   Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
   tree nodes.  
           Caps are not in the regno allocno maps.  
           Remember that we can create temporary allocnos to break
           cycles in register shuffle.  
static int regno_allocno_order_compare_func ( )
static
   Sort allocnos according to their order in regno allocno list.  
     If allocnos are equally good, sort by allocno numbers, so that
     the results of qsort leave nothing to chance.  We put allocnos
     with higher number first in the list because it is the original
     order for allocnos from loops on the same levels.  
static void remove_low_level_allocnos ( )
static
   Remove allocnos from all loops but the root.  
         Remove the allocno and update info of allocno in the upper
         region.  

Referenced by remove_unnecessary_allocnos().

static void remove_uneccesary_loop_nodes_from_loop_tree ( )
static
   Remove subregions of NODE if their separate allocation will not
   improve the result.  

References ira_loop_tree_node::all_allocnos, bitmap_set_bit(), and ira_loop_tree_node::regno_allocno_map.

Referenced by remove_unnecessary_allocnos().

static void remove_unnecessary_allocnos ( )
static
   Remove allocnos from loops removed from the allocation
   consideration.  
                     There are no allocnos with the same regno in
                     upper region -- just move the allocno to the
                     upper region.  
                     Remove the allocno and update info of allocno in
                     the upper region.  
                     Remove it from the corresponding regno allocno
                     map to avoid info propagation of subsequent
                     allocno into this already removed allocno.  
           We need to restore the order in regno allocno list.  

References cfun, finish_loop_tree_node(), mark_all_loops_for_removal(), mark_loops_for_removal(), number_of_loops(), remove_low_level_allocnos(), and remove_uneccesary_loop_nodes_from_loop_tree().

static void remove_unnecessary_regions ( )
static
   Remove loops from consideration.  We remove all loops except for
   the root if ALL_P, or loops for which a separate allocation will not
   improve the result.  We have to do this after allocno creation and
   after evaluation of allocno costs and classes, because only after
   that can the register pressure be known and calculated.  
static int setup_loop_tree_level ( )
static
   The following recursive function sets up levels of nodes of the
   tree given its root LOOP_NODE.  The enumeration starts with LEVEL.
   The function returns the maximal value of level in the tree + 1.  

References ira_loop_tree_node::bb, ira_loop_tree_node::children, basic_block_def::index, ira_loop_tree_node::loop, basic_block_def::loop_father, loop_outer(), ira_loop_tree_node::next, loop::num, ira_loop_tree_node::regno_allocno_map, ira_loop_tree_node::subloop_next, and ira_loop_tree_node::subloops.

static void setup_min_max_allocno_live_range_point ( )
static
   Set up minimal and maximal live range points for allocnos.  
               Accumulation of range info.  
static void setup_min_max_conflict_allocno_ids ( )
static
   Set up minimal and maximal conflict ids of allocnos with which
   given allocno can conflict.  
             If we skip an allocno, the allocnos with smaller ids will
             also be skipped because of the secondary sorting by
             range finishes (see function
             object_range_compare_func).  
           We could increase min further in this case but it is good
           enough.  
           We could decrease max further in this case but it is good
           enough.  
         When filling, we can go beyond A's range finish to recognize
         intersections quickly, because if the finish of a subsequently
         processed allocno's range (such an allocno has a smaller
         conflict id) lies beyond A's range finish, then the ranges
         definitely intersect (the reason for this is that allocnos
         with bigger conflict ids have range starts that are not
         smaller than those of allocnos with smaller ids).  
     For allocnos with more than one object, we may later record extra conflicts in
     subobject 0 that we cannot really know about here.
     For now, simply widen the min/max range of these subobjects.  
static void sort_conflict_id_map ( )
static
   Sort ira_object_id_map and set up conflict id of allocnos.  
static void swap_allocno_copy_ends_if_necessary ( )
static
   Make copy CP a canonical copy, in which the number of the
   first allocno is less than that of the second one.  

References ira_allocno_copy::first, ira_allocno_copy::freq, ira_allocno_copy::next_first_allocno_copy, ira_allocno_copy::next_second_allocno_copy, ira_allocno_copy::num, and ira_allocno_copy::second.
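
   The point of the canonical form is that each unordered pair of
   allocnos has a single representation, so lookups only need to check
   one orientation; a trivial sketch with a hypothetical two-field copy
   type (the real function also swaps the per-allocno copy list links).

     struct copy_ends { int first_num, second_num; };

     static void
     canonicalize_copy (struct copy_ends *cp)
     {
       if (cp->first_num > cp->second_num)
         {
           int tmp = cp->first_num;

           cp->first_num = cp->second_num;
           cp->second_num = tmp;
         }
     }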

static void update_bad_spill_attribute ( )
static
   At this point a true value of the allocno attribute bad_spill_p means
   that there is an insn where the allocno occurs and where the allocno
   can not be used as memory.  The function updates the attribute; now
   it can be true only for allocnos which can not be used as memory in
   an insn and in whose live ranges there are other allocno deaths.
   Spilling allocnos with a true value will not improve the code because
   it will not make other allocnos colorable, and additional reloads
   for the corresponding pseudo will be generated in the reload pass for
   each insn in which it occurs.

   This is a trick mentioned in a classic article by Chaitin et al. which
   is frequently omitted in other implementations of RA based on
   graph coloring.  

References ira_objects_num, loop::num, and object_range_compare_func().

static void update_conflict_hard_reg_costs ( )
static
   Identify allocnos which prefer a register class with a single hard register.
   Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
   less likely to use the preferred singleton register.  

Variable Documentation

alloc_pool allocno_pool
static
   Pools for allocnos, allocno live ranges and objects.  
vec<ira_allocno_t> allocno_vec
static
   Vec containing references to all created allocnos.  It is a
   container of array allocnos.  
vec<ira_loop_tree_node_t> children_vec
static
   Vec containing references to all children of loop tree nodes.  
int* conflict_check
static
   The array used to find duplications in conflict vectors of
   allocnos.  
alloc_pool copy_pool
static
   Pools for copies.  

Referenced by debug(), and print_copy().

vec<ira_copy_t> copy_vec
static
   Vec containing references to all created copies.  It is a
   container of array ira_copies.  

Referenced by debug().

alloc_pool cost_vector_pool[N_REG_CLASSES]
static
   Pools for cost vectors.  They are defined only for allocno classes.  
basic_block curr_bb
static
   The basic block currently being processed.  
int curr_conflict_check_tick
static
   The value used to mark allocation presence in conflict vector of
   the current allocno.  
ira_allocno_t* ira_allocnos
   Array of references to all allocnos.  The order number of the
   allocno corresponds to the index in the array.  Removed allocnos
   have NULL element value.  

Referenced by update_left_conflict_sizes_p().

int ira_allocnos_num
ira_loop_tree_node_t ira_bb_nodes
   All nodes representing basic blocks are referred through the
   following array.  We can not use basic block member `aux' for this
   because it is used for insertion of insns on edges.  
ira_copy_t* ira_copies
   Array of references to all copies.  The order number of the copy
   corresponds to the index in the array.  Removed copies have NULL
   element value.  
int ira_copies_num
   Size of the previous array.  

Referenced by print_allocno_prefs().

ira_loop_tree_node_t ira_curr_loop_tree_node
   The current loop tree node and its regno allocno map.  
ira_allocno_t* ira_curr_regno_allocno_map

Referenced by mark_pseudo_reg_live().

ira_loop_tree_node_t ira_loop_nodes
   All nodes representing loops are referred through the following
   array.  
unsigned int ira_loop_nodes_count
   And size of the ira_loop_nodes array.  
int ira_loop_tree_height
   Height of the loop tree.  
ira_loop_tree_node_t ira_loop_tree_root
   The root of the loop tree corresponding to the whole function.  

Referenced by add_copies(), fix_reg_equiv_init(), print_conflicts(), and process_bb_for_costs().

ira_object_t* ira_object_id_map
   Map a conflict id to its conflict record.  

Referenced by dec_register_pressure(), process_bb_node_lives(), and propagate_copies().

vec<ira_object_t> ira_object_id_map_vec
static
   Vec containing references to all created ira_objects.  It is a
   container of ira_object_id_map.  
int ira_objects_num
   Count of conflict record structures we've created, used when creating
   a new conflict id.  

Referenced by ira_print_live_range_list(), modify_move_list(), and update_bad_spill_attribute().

ira_pref_t* ira_prefs
   Array of references to all allocno preferences.  The order number
   of the preference corresponds to the index in the array.  
int ira_prefs_num
   Size of the previous array.  
ira_allocno_t* ira_regno_allocno_map
   Map regno -> allocnos with given regno (see comments for
   allocno member `next_regno_allocno').  

Referenced by coalesced_pseudo_reg_slot_compare(), ira_create_new_reg(), ira_reassign_pseudos(), merge_allocnos(), and slot_coalesced_allocno_live_ranges_intersect_p().

int last_basic_block_before_change
static
   LAST_BASIC_BLOCK before generating additional insns because of live
   range splitting.  Emitting insns on a critical edge creates a new
   basic block.  

Referenced by init_loop_tree_node().

alloc_pool live_range_pool
static
alloc_pool object_pool
static
alloc_pool pref_pool
static
   Pools for allocno preferences.  
vec<ira_pref_t> pref_vec
static
   Vec containing references to all created preferences.  It is a
   container of array ira_prefs.  
ira_allocno_t* regno_allocnos
static
   This array is used to sort allocnos to restore allocno order in
   the regno allocno list.  
ira_allocno_t* regno_top_level_allocno_map
static
   The page contains code transforming a more-than-one-region internal
   representation (IR) into a one-region IR, which is necessary for reload.
   This transformation is called IR flattening.  We might just rebuild
   the IR for one region but we don't do it because it takes a lot of
   time.  
   Map: regno -> allocnos which will finally represent the regno for
   IR with one region.  
vec<ira_loop_tree_node_t> removed_loop_vec
static
   Definition of vector of loop tree nodes.  
   Vec containing references to all removed loop tree nodes.