GCC Middle and Back End API Reference
dse.c File Reference

Data Structures

struct  store_info
struct  read_info
struct  insn_info
struct  bb_info
struct  group_info
struct  deferred_change
struct  clear_alias_mode_holder
struct  invariant_group_base_hasher
struct  note_add_store_info

Typedefs

typedef struct store_info* store_info_t
typedef struct read_info* read_info_t
typedef struct insn_info* insn_info_t
typedef struct bb_info* bb_info_t
typedef struct group_info* group_info_t
typedef struct group_info* const_group_info_t
typedef struct deferred_change* deferred_change_t

Functions

static unsigned HOST_WIDE_INT lowpart_bitmask ()
static bool gate_dse1 (void)
static bool gate_dse2 (void)
static struct clear_alias_mode_holder* clear_alias_set_lookup ()
static group_info_t get_group_info ()
static void dse_step0 ()
static void free_store_info ()
static void note_add_store ()
static int emit_inc_dec_insn_before (rtx mem, rtx op, rtx dest, rtx src, rtx srcoff, void *arg)
static bool check_for_inc_dec_1 ()
bool check_for_inc_dec ()
static void delete_dead_store_insn ()
static bool local_variable_can_escape ()
static bool can_escape ()
static void set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width, tree expr)
static void reset_active_stores ()
static void free_read_records ()
static void add_wild_read ()
static void add_non_frame_wild_read ()
static bool const_or_frame_p ()
static bool canon_address (rtx mem, alias_set_type *alias_set_out, int *group_id, HOST_WIDE_INT *offset, cselib_val **base)
static void clear_rhs_from_active_local_stores ()
static void set_position_unneeded ()
static void set_all_positions_unneeded ()
static bool any_positions_needed_p ()
static bool all_positions_needed_p ()
static rtx get_stored_val (store_info_t, enum machine_mode, HOST_WIDE_INT, HOST_WIDE_INT, basic_block, bool)
static int record_store ()
static void dump_insn_info ()
static rtx find_shift_sequence (int access_size, store_info_t store_info, enum machine_mode read_mode, int shift, bool speed, bool require_cst)
static void look_for_hardregs ()
static bool replace_read (store_info_t store_info, insn_info_t store_insn, read_info_t read_info, insn_info_t read_insn, rtx *loc, bitmap regs_live)
static int check_mem_read_rtx ()
static void check_mem_read_use ()
static bool get_call_args ()
static bitmap copy_fixed_regs ()
static void scan_insn ()
static void remove_useless_values ()
static void dse_step1 ()
static void dse_step2_init ()
static bool dse_step2_nospill ()
static int get_bitmap_index ()
static void scan_stores_nospill ()
static void scan_stores_spill ()
static void scan_reads_nospill ()
static void scan_reads_spill ()
static insn_info_t find_insn_before_first_wild_read ()
static void dse_step3_scan ()
static void dse_step3_exit_block_scan ()
static void mark_reachable_blocks ()
static void dse_step3 ()
static void dse_confluence_0 ()
static bool dse_confluence_n ()
static bool dse_transfer_function ()
static void dse_step4 ()
static void dse_step5_nospill ()
static void dse_step6 ()
static void dse_step7 ()
static unsigned int rest_of_handle_dse ()
rtl_opt_pass* make_pass_rtl_dse1 ()
rtl_opt_pass* make_pass_rtl_dse2 ()

Variables

static bitmap_obstack dse_bitmap_obstack
static struct obstack dse_obstack
static bitmap scratch = NULL
static alloc_pool cse_store_info_pool
static alloc_pool rtx_store_info_pool
static alloc_pool read_info_pool
static alloc_pool insn_info_pool
static insn_info_t active_local_stores
static int active_local_stores_len
static alloc_pool bb_info_pool
static bb_info_t* bb_table
static alloc_pool rtx_group_info_pool
static int rtx_group_next_id
static vec< group_info_t > rtx_group_vec
static alloc_pool deferred_change_pool
static deferred_change_t deferred_change_list = NULL
static group_info_t clear_alias_group
static htab_t clear_alias_mode_table
static bool stores_off_frame_dead_at_return
static int globally_deleted
static int locally_deleted
static int spill_deleted
static bitmap all_blocks
static bitmap kill_on_calls
static unsigned int current_position
static hash_table< invariant_group_base_hasher > rtx_group_table

Typedef Documentation

typedef struct bb_info* bb_info_t
typedef struct group_info* const_group_info_t
typedef struct group_info* group_info_t
typedef struct insn_info* insn_info_t
typedef struct read_info* read_info_t
typedef struct store_info* store_info_t

Function Documentation

static void add_non_frame_wild_read ( )
static
   Set the BB_INFO so that the last insn is marked as a wild read of
   non-frame locations.  
static void add_wild_read ( )
static
   Set the BB_INFO so that the last insn is marked as a wild read.  

Referenced by replace_read(), and set_position_unneeded().

static bool all_positions_needed_p ( )
inline static
   Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
   store are needed.  
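
   A minimal sketch of the check, assuming the two-way positions_needed
   representation in struct store_info (an inline bitmask for narrow
   stores, a bitmap of unneeded bytes for large ones); illustrative, not
   necessarily the exact implementation:

     static inline bool
     all_positions_needed_p (store_info_t s_info, int start, int width)
     {
       if (s_info->is_large)
         {
           /* Large stores set a bit for each byte that is NOT needed.  */
           int end = start + width;
           while (start < end)
             if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
               return false;
           return true;
         }
       else
         {
           /* Small stores keep a mask with a 1 for each needed byte.  */
           unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
           return (s_info->positions_needed.small_bitmask & mask) == mask;
         }
     }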

Referenced by check_mem_read_rtx().

static bool any_positions_needed_p ( )
inline static
   Return TRUE if any bytes from S_INFO store are needed.  
static bool can_escape ( )
static
   Return whether EXPR can possibly escape the current function scope.  

References free_read_records(), bb_info::last_insn, reset_active_stores(), and insn_info::wild_read.

static bool canon_address ( rtx  mem,
alias_set_type *  alias_set_out,
int *  group_id,
HOST_WIDE_INT *  offset,
cselib_val **  base 
)
static
   Take all reasonable action to put the address of MEM into the form
   that we can do analysis on.

   The gold standard is to get the address into the form: address +
   OFFSET where address is something that rtx_varies_p considers a
   constant.  When we can get the address in this form, we can do
   global analysis on it.  Note that for constant bases, address is
   not actually returned, only the group_id.  The address can be
   obtained from that.

   If that fails, we try cselib to get a value we can at least use
   locally.  If that fails we return false.

   The GROUP_ID is set to -1 for cselib bases and the index of the
   group for non_varying bases.

   FOR_READ is true if this is a mem read and false if not.  
     First see if just canon_rtx (mem_address) is const or frame,
     if not, try cselib_expand_value_rtx and call canon_rtx on that.  
             Use cselib to replace all of the reg references with the full
             expression.  This will take care of the case where we have

             r_x = base + offset;
             val = *r_x;

             by making it into

             val = *(base + offset);  
             If this fails, just go with the address from first
             iteration.  
         Split the address into canonical BASE + OFFSET terms.  
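
   An illustrative example (not from the source): a MEM whose address is
   (plus (reg/f frame_pointer) (const_int 8)) has a constant base, so
   *GROUP_ID is set to the index of the frame group and *OFFSET to 8.
   A MEM addressed through an arbitrary pseudo instead yields
   *GROUP_ID == -1 and a cselib value in *BASE, usable only for local
   analysis.
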
bool check_for_inc_dec ( )
   Entry point for postreload.  If you work on reload_cse, or you need this
   anywhere else, consider if you can provide register liveness information
   and add a parameter to this function so that it can be passed down in
   insn_info.fixed_regs_live.  
static bool check_for_inc_dec_1 ( )
static
   Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
   is there, is split into a separate insn.
   Return true on success (or if there was nothing to do), false on failure.  

Referenced by dse_step3().

static int check_mem_read_rtx ( )
static
   A for_each_rtx callback in which DATA is the bb_info.  Check to see
   if LOC is a mem and if it is look at the address and kill any
   appropriate stores that may be active.  
     If it is reading readonly mem, then there can be no conflict with
     another write. 
     For alias_set != 0, canon_true_dependence should never be called.  
     We ignore the clobbers in store_info.  This is mildly aggressive,
     but there really should not be a clobber followed by a read.  
             Skip the clobbers.  
         This is the restricted case where the base is a constant or
         the frame pointer and offset is a constant.  
             Skip the clobbers.  
             There are three cases here.  
               We have a cselib store followed by a read from a
               const base. 
                 This is a block mode load.  We may get lucky and
                 canon_true_dependence may save the day.  
                 If this read is just reading back something that we just
                 stored, rewrite the read.  
                     The bases are the same, just see if the offsets
                     overlap.  
             else
             The else case that is missing here is that the
             bases are constant but different.  There is nothing
             to do here because there is no overlap.  
             Skip the clobbers.  
             If this read is just reading back something that we just
             stored, rewrite the read.  

References all_positions_needed_p(), store_info::begin, bb_info::regs_live, replace_read(), and store_info::rhs.

static void check_mem_read_use ( )
static
   A for_each_rtx callback in which DATA points to the INSN_INFO, as
   for check_mem_read_rtx.  Nullify the pointer if i_m_r_m_r returns
   true for any part of *LOC.  

References store_info::is_set, store_info::next, and insn_info::store_rec.

static struct clear_alias_mode_holder* clear_alias_set_lookup ( )
static
   Find the entry associated with ALIAS_SET.  
static void clear_rhs_from_active_local_stores ( )
static
   Clear the rhs field from the active_local_stores array.  
         Skip the clobbers.  

Referenced by set_all_positions_unneeded().

static bool const_or_frame_p ( )
static
   Return true if X is a constant or one of the registers that behave
   as a constant over the life of a function.  This is equivalent to
   !rtx_varies_p for memory addresses.  
         Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  
             The arg pointer varies if it is not a fixed register.  
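
   A simplified sketch of the test (assuming the usual *_pointer_rtx
   globals and the fixed_regs array):

     static bool
     const_or_frame_p (rtx x)
     {
       if (CONSTANT_P (x))
         return true;

       if (GET_CODE (x) == REG)
         {
           /* Compare the rtx itself, not just the register number,
              because after elimination the hard reg may be reused
              for pseudos.  */
           if (x == frame_pointer_rtx
               || x == hard_frame_pointer_rtx
               /* The arg pointer varies if it is not a fixed register.  */
               || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
             return true;
         }

       return false;
     }
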
static bitmap copy_fixed_regs ( )
static
   Return a bitmap of the fixed registers contained in IN.  
static void delete_dead_store_insn ( )
static
static void dse_confluence_0 ( )
static
   Confluence function for blocks with no successors.  Create an out
   set from the gen set of the exit block.  This block logically has
   the exit block as a successor.  

References bitmap_clear(), dump_file, and dump_flags.

static bool dse_confluence_n ( )
static
   Propagate the information from the in set of the dest of E to the
   out set of the src of E.  If the various in or out sets are not
   there, that means they are all ones.  
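
   In dataflow terms, a sketch (bb_info fields as documented below;
   bitmaps are allocated on the dse_bitmap_obstack):

     static bool
     dse_confluence_n (edge e)
     {
       bb_info_t src_info  = bb_table[e->src->index];
       bb_info_t dest_info = bb_table[e->dest->index];

       if (dest_info->in)
         {
           if (src_info->out)
             bitmap_and_into (src_info->out, dest_info->in);
           else
             {
               /* A missing out set means "all ones", so just copy.  */
               src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
               bitmap_copy (src_info->out, dest_info->in);
             }
         }
       return true;
     }
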
static void dse_step0 ( )
static
   Initialization of data structures.  

Referenced by dse_step4().

static void dse_step1 ( )
static
   Do all of step 1.  
             Scan the insns.  
             This is something of a hack, because the global algorithm
             is supposed to take care of the case where stores go dead
             at the end of the function.  However, the global
             algorithm must take a more conservative view of block
             mode reads than the local algorithm does.  So to get the
             case where you have a store to the frame followed by a
             non overlapping block mode read, we look at the active
             local stores at the end of the function and delete all of
             the frame and spill based ones.  
                     Skip the clobbers.  
             Get rid of the loads that were discovered in
             replace_read.  Cselib is finished with this block.  
                 There is no reason to validate this change.  That was
                 done earlier.  
             Get rid of all of the cselib based store_infos in this
             block and mark the containing insns as not being
             deletable.  
                     Free at least positions_needed bitmaps.  

Referenced by dse_step4().

static void dse_step2_init ( )
static
         For all non stack related bases, we only consider a store to
         be deletable if there are two or more stores for that
         position.  This is because it takes one store to make the
         other store redundant.  However, for the stores that are
         stack related, we consider them if there is only one store
         for the position.  We do this because the stack related
         stores can be deleted if there is no read between them and
         the end of the function.

         To make this work in the current framework, we take the stack
         related bases and add all of the bits from store1 into store2.
         This has the effect of making them eligible even if there is
         only one store.   

References bitmap_and_compl_into(), bitmap_ior_into(), group_info::frame_related, group_info::group_kill, and group_info::process_globally.

Referenced by dse_step4().

static bool dse_step2_nospill ( )
static
   Init the offset tables for the normal case.  
     Position 0 is skipped because a 0 entry in the offset maps means
     "unused".  
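
   Roughly, as a sketch (group fields as in struct group_info; the
   clear_alias_group and map-clearing details are elided), every offset
   recorded in the store2 bitmaps gets the next global bitmap position:

     unsigned int i, j;
     group_info_t group;
     bitmap_iterator bi;

     current_position = 1;  /* 0 in the offset maps means "unused".  */
     FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
       {
         EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
           {
             bitmap_set_bit (group->group_kill, current_position);
             group->offset_map_n[j] = current_position++;
           }
         EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
           {
             bitmap_set_bit (group->group_kill, current_position);
             group->offset_map_p[j] = current_position++;
           }
       }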

References bitmap_and_compl_into(), bitmap_ior_into(), and group_info::group_kill.

Referenced by dse_step4().

static void dse_step3 ( )
static
   Build the transfer functions for the function.  
         If this is the second time dataflow is run, delete the old
         sets.  
     For any block in an infinite loop, we must initialize the out set
     to all ones.  This could be expensive, but almost never occurs in
     practice. However, it is common in regression tests.  

References store_info::alias_set, store_info::begin, bitmap_bit_p(), bitmap_empty_p(), bitmap_print(), insn_info::cannot_delete, check_for_inc_dec_1(), dbg_cnt(), delete_insn(), deleted, dump_file, dump_flags, get_bitmap_index(), globally_deleted, store_info::group_id, HOST_WIDE_INT, basic_block_def::index, insn_info::insn, store_info::is_set, bb_info::last_insn, store_info::next, bb_info::out, scan_stores_nospill(), and insn_info::store_rec.

Referenced by dse_step4().

static void dse_step3_exit_block_scan ( )
static
   Set the gen set of the exit block, and also any block with no
   successors that does not have a wild read.  
     The gen set is all 0's for the exit block except for the
     frame_pointer_group.  
static void dse_step3_scan ( )
static
   Scan the insns in BB_INFO starting at PTR and going to the top of
   the block in order to build the gen and kill sets for the block.
   We start at ptr which may be the last insn in the block or may be
   the first insn with a wild read.  In the latter case we are able to
   skip the rest of the block because it just does not matter:
   anything that happens is hidden by the wild read.  
       There are no wild reads in the spill case.  
     In the spill case, or in the no_spill case when there is no wild
     read in the block, we will need a kill set.  
         There may have been code deleted by the dce pass run before
         this phase.  
             Process the read(s) last.  
static void dse_step5_nospill ( )
static
             There may have been code deleted by the dce pass run before
             this phase.  
                 Try to delete the current insn.  
                 Skip the clobbers.  
             We do want to process the local info if the insn was
             deleted.  For instance, if the insn did a wild read, we
             no longer need to trash the info.  

References dbg_cnt().

Referenced by dse_step4().

static void dse_step6 ( )
static
             There may have been code deleted by the dce pass run before
             this phase.  

Referenced by dse_step4().

static void dse_step7 ( )
static

Referenced by dse_step4().

static bool dse_transfer_function ( )
static

Propagate the info from the out to the in set of BB_INDEX's basic block. There are three cases:

1) The block has no kill set. In this case the kill set is all ones. It does not matter what the out set of the block is, none of the info can reach the top. The only thing that reaches the top is the gen set and we just copy the set.

2) There is a kill set but no out set and bb has successors. In this case we just return. Eventually an out set will be created and it is better to wait than to create a set of ones.

3) There is both a kill and out set. We apply the obvious transfer function.

             Case 3 above.  
           Case 2 above.  
         Case 1 above.  If there is already an in set, nothing
         happens.  
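
   The three cases as a sketch (allocation of a missing in set shown
   only for case 1; bb_info fields as documented below):

     if (bb_info->kill)
       {
         if (bb_info->out)
           /* Case 3: in = gen | (out & ~kill).  */
           return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                                        bb_info->out, bb_info->kill);
         /* Case 2: no out set yet; wait instead of assuming all ones.  */
         return false;
       }

     /* Case 1: kill is conceptually all ones, so in = gen.  */
     if (!bb_info->in)
       {
         bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
         bitmap_copy (bb_info->in, bb_info->gen);
         return true;
       }
     return false;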

References delete_dead_store_insn(), dump_file, dump_flags, insn_info::insn, and store_info::redundant_reason.

static void dump_insn_info ( )
static

References bitmap_set_range(), and regs_set.

static int emit_inc_dec_insn_before ( rtx  mem,
rtx  op,
rtx  dest,
rtx  src,
rtx  srcoff,
void *  arg 
)
static
   Callback for for_each_inc_dec that emits an INSN that sets DEST to
   SRC + SRCOFF before insn ARG.  
     We can reuse all operands without copying, because we are about
     to delete the insn that contained it.  
     If a failure was flagged above, return 1 so that for_each_inc_dec will
     return it immediately, communicating the failure to its caller.  
static insn_info_t find_insn_before_first_wild_read ( )
static
   Return the insn in BB_INFO before the first wild read or if there
   are no wild reads in the block, return the last insn.  
             Block starts with wild read.  

References bitmap_copy(), bb_info::gen, basic_block_def::index, and bb_info::out.

Referenced by scan_stores_nospill().

static rtx find_shift_sequence ( int  access_size,
store_info_t  store_info,
enum machine_mode  read_mode,
int  shift,
bool  speed,
bool  require_cst 
)
static
   If the modes are different and the value's source and target do not
   line up, we need to extract the value from lower part of the rhs of
   the store, shift it, and then put it into a form that can be shoved
   into the read_insn.  This function generates a right SHIFT of a
   value that is at least ACCESS_SIZE bytes wide of READ_MODE.  The
   shift sequence is returned or NULL if we failed to find a
   shift.  
     Some machines like the x86 have shift insns for each size of
     operand.  Other machines like the ppc or the ia-64 may only have
     shift insns that shift values within 32 or 64 bit registers.
     This loop tries to find the smallest shift insn that will right
     justify the value we want to read but is available in one insn on
     the machine.  
         If a constant was stored into memory, try to simplify it here,
         otherwise the cost of the shift might preclude this optimization
         e.g. at -Os, even when no actual shift will be needed.  
         Try a wider mode if truncating the store mode to NEW_MODE
         requires a real instruction.  
         Also try a wider mode if the necessary punning is either not
         desirable or not possible.  
         In theory we could also check for an ashr.  Ian Taylor knows
         of one dsp where the cost of these two was not the same.  But
         this really is a rare case anyway.  
         The computation up to here is essentially independent
         of the arguments and could be precomputed.  It may
         not be worth doing so.  We could precompute if
         worthwhile or at least cache the results.  The result
         technically depends on both SHIFT and ACCESS_SIZE,
         but in practice the answer will depend only on ACCESS_SIZE.  
         We found an acceptable shift.  Generate a move to
         take the value from the store and put it into the
         shift pseudo, then shift it, then generate another
         move to put it into the target of the read.  
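
   An illustrative case (not taken from the source): a DImode store whose
   upper SImode half is later read on a little-endian target needs
   SHIFT == 32, and the emitted sequence is conceptually

     new_reg:DI = stored_rhs:DI
     new_reg:DI = new_reg:DI >> 32
     read_reg:SI = lowpart (new_reg:DI)

   where new_reg is a fresh pseudo in the narrowest mode for which such
   a shift is available and acceptably cheap.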

References store_info::begin, store_info::const_rhs, copy_rtx(), store_info::end, extract_low_bits(), gen_int_mode(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT, int_mode_for_mode(), store_info::mem, optimize_bb_for_speed_p(), store_info::rhs, and shift.

static void free_read_records ( )
static
   Free all READ_REC of the LAST_INSN of BB_INFO.  

Referenced by can_escape().

static void free_store_info ( )
static
   Delete all of the store_info recs from INSN_INFO.  
static bool gate_dse1 ( void  )
static
static bool gate_dse2 ( void  )
static
static int get_bitmap_index ( )
static
   Look up the bitmap index for OFFSET in GROUP_INFO.  If it is not
   there, return 0.  
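
   A sketch, assuming the negative/non-negative offset map split in
   struct group_info (offset_map_n for offsets below zero, offset_map_p
   otherwise):

     static int
     get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
     {
       if (offset < 0)
         {
           HOST_WIDE_INT offset_p = -offset - 1;
           if (offset_p >= group_info->offset_map_size_n)
             return 0;
           return group_info->offset_map_n[offset_p];
         }
       else
         {
           if (offset >= group_info->offset_map_size_p)
             return 0;
           return group_info->offset_map_p[offset];
         }
     }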

References insn_info::prev_insn, and insn_info::wild_read.

Referenced by dse_step3().

static bool get_call_args ( )
static
   Get arguments passed to CALL_INSN.  Return TRUE if successful.
   So far it only handles arguments passed in registers.  
static group_info_t get_group_info ( )
static
   Get the GROUP for BASE.  Add a new group if it is not there.  
         Find the store_base_info structure for BASE, creating a new one
         if necessary.  
static rtx get_stored_val ( store_info_t  store_info,
enum machine_mode  read_mode,
HOST_WIDE_INT  read_begin,
HOST_WIDE_INT  read_end,
basic_block  bb,
bool  require_cst 
)
static
   Helper function for replace_read and record_store.
   Attempt to return a value stored in STORE_INFO, covering the bytes
   from READ_BEGIN up to (but not including) READ_END, read in
   READ_MODE.  Return NULL if not successful.  If REQUIRE_CST is true,
   only return a constant value.  
     To get here the read is within the boundaries of the write so
     shift will never be negative.  Start out with the shift being in
     bytes.  
     From now on it is bits.  
         The store is a memset (addr, const_val, const_size).  
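
   A sketch of the shift computation (field names as in struct
   store_info; the memset and constant cases are elided):

     HOST_WIDE_INT gap;

     if (BYTES_BIG_ENDIAN)
       gap = store_info->end - read_end;
     else
       gap = read_begin - store_info->begin;

     if (gap != 0)
       {
         /* From now on it is bits.  */
         HOST_WIDE_INT shift = gap * BITS_PER_UNIT;
         HOST_WIDE_INT access_size = GET_MODE_SIZE (read_mode) + gap;
         read_reg = find_shift_sequence (access_size, store_info,
                                         read_mode, shift,
                                         optimize_bb_for_speed_p (bb),
                                         require_cst);
       }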

References bitmap_and_into(), bitmap_empty_p(), df_print_regset(), dump_file, dump_flags, look_for_hardregs(), note_stores(), reg_obstack, and regs_set.

static bool local_variable_can_escape ( )
static
   Return whether DECL, a local variable, can possibly escape the current
   function scope.  
     If this is a partitioned variable, we need to consider all the variables
     in the partition.  This is necessary because a store into one of them can
     be replaced with a store into another and this may not change the outcome
     of the escape analysis.  
static void look_for_hardregs ( )
static
   Call back for note_stores to find the hard regs set or clobbered by
   insn.  Data is a bitmap of the hardregs set so far.  

Referenced by get_stored_val().

static unsigned HOST_WIDE_INT lowpart_bitmask ( )
static
   Return a bitmask with the first N low bits set.  
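
   A sketch of the mask computation (assuming 1 <= N <=
   HOST_BITS_PER_WIDE_INT, since a shift by the full word width would
   be undefined):

     static unsigned HOST_WIDE_INT
     lowpart_bitmask (int n)
     {
       unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
       return mask >> (HOST_BITS_PER_WIDE_INT - n);
     }
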
rtl_opt_pass* make_pass_rtl_dse1 ( )
rtl_opt_pass* make_pass_rtl_dse2 ( )
static void mark_reachable_blocks ( )
static
   Find all of the blocks that are not backwards reachable from the
   exit block or any block with no successors (BB).  These are the
   infinite loops or infinite self loops.  These blocks will still
   have their bits set in UNREACHABLE_BLOCKS.  
static void note_add_store ( )
static
   Callback for emit_inc_dec_insn_before via note_stores.
   Check if a register is clobbered which is live afterwards.  
     If this register is referenced by the current or an earlier insn,
     that's OK.  E.g. this applies to the register that is being incremented
     with this addition.  
     If we come here, we have a clobber of a register that's only OK
     if that register is not live.  If we don't have liveness information
     available, fail now.  
     Now check if this is a live fixed register.  
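
   A sketch of these steps, assuming the note_add_store_info fields
   first/current/fixed_regs_live/failure:

     static void
     note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
     {
       note_add_store_info *info = (note_add_store_info *) data;
       rtx insn;
       int regno, n;

       if (!REG_P (loc))
         return;

       /* Referenced by the current or an earlier insn: OK.  */
       for (insn = info->first;
            insn != NEXT_INSN (info->current);
            insn = NEXT_INSN (insn))
         if (reg_referenced_p (loc, PATTERN (insn)))
           return;

       /* Without liveness information we must fail.  */
       if (!info->fixed_regs_live)
         {
           info->failure = true;
           return;
         }

       /* Fail if any part of the clobbered register is live and fixed.  */
       regno = REGNO (loc);
       n = hard_regno_nregs[regno][GET_MODE (loc)];
       while (--n >= 0)
         if (bitmap_bit_p (info->fixed_regs_live, regno++))
           info->failure = true;
     }
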
static int record_store ( )
static
   BODY is an instruction pattern that belongs to INSN.  Return 1 if
   there is a candidate store, after adding it to the appropriate
   local store group if so.  
     If this is not used, then this cannot be used to keep the insn
     from being deleted.  On the other hand, it does provide something
     that can be used to prove that another store is dead.  
     Check whether that value is a suitable memory location.  
         If the set or clobber is unused, then it does not affect our
         ability to get rid of the entire insn.  
     At this point we know mem is a mem. 
         Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
         as memset (addr, 0, 36);  
                 If the set or clobber is unused, then it does not affect our
                 ability to get rid of the entire insn.  
     We can still process a volatile mem, we just cannot delete it.  
         In the restrictive case where the base is a constant or the
         frame pointer we can do global analysis.  
         No place to keep the value after ra.  
         Sometimes the store and reload is used for truncation and
         rounding.  
     Check to see if this store causes some other stores to be
     dead.  
     For alias_set != 0, canon_true_dependence should never be called.  
         Skip the clobbers.  We delete the active insn if this insn
         shadows the set.  To have been put on the active list, it
         has exactly one set. 
             Generally, spills cannot be processed if any of the
             references to the slot have a different mode.  But if
             we are in the same block and mode is exactly the same
             between this store and one before in the same block,
             we can still delete it.  
             Even if PTR won't be eliminated as unneeded, if both
             PTR and this insn store the same constant value, we might
             eliminate this insn instead.  
           Need to see if it is possible for this store to overwrite
           the value of store_info.  If it is, set the rhs to NULL to
           keep it from being used to remove a load.  
         An insn can be deleted if every position of every one of
         its s_infos is zero.  
     Finish filling in the store_info.  
     If this is a clobber, we return 0.  We will only be able to
     delete this insn if there is only one USED store, but we
     can use the clobber to delete other stores earlier.  

References insn_info::contains_cselib_groups, dump_file, dump_flags, may_be_sp_based_p(), offset, pool_alloc(), and insn_info::stack_pointer_based.

static void remove_useless_values ( )
static
   Remove BASE from the set of active_local_stores.  This is a
   callback from cselib that is used to get rid of the stores in
   active_local_stores.  
         If ANY of the store_infos match the cselib group that is
         being deleted, then the insn cannot be deleted.  
static bool replace_read ( store_info_t  store_info,
insn_info_t  store_insn,
read_info_t  read_info,
insn_info_t  read_insn,
rtx *  loc,
bitmap  regs_live 
)
static
   Take a sequence of:
     A <- r1
     ...
     ... <- A

   and change it into
   r2 <- r1
   A <- r1
   ...
   ... <- r2

   or

   r3 <- extract (r1)
   r3 <- r3 >> shift
   r2 <- extract (r3)
   ... <- r2

   or

   r2 <- extract (r1)
   ... <- r2

   Depending on the alignment and the mode of the store and
   subsequent load.


   The STORE_INFO and STORE_INSN are for the store and READ_INFO
   and READ_INSN are for the read.  Return true if the replacement
   went ok.  
     Create a sequence of instructions to set up the read register.
     This sequence goes immediately before the store and its result
     is read by the load.

     We need to keep this in perspective.  We are replacing a read
     with a sequence of insns, but the read will almost certainly be
     in cache, so it is not going to be an expensive one.  Thus, we
     are not willing to do a multi insn shift or worse a subroutine
     call to get rid of the read.  
     Force the value into a new register so that it won't be clobbered
     between the store and the load.  
         Now we have to scan the set of new instructions to see if the
         sequence contains any sets of hardregs that happened to be
         live at this point.  For instance, this can happen if one of
         the insns sets the CC and the CC happened to be live at that
         point.  This does occasionally happen, see PR 37922.  
         Insert this right before the store insn where it will be safe
         from later insns that might change it before the read.  
         And now for the kludge part: cselib croaks if you just
         return at this point.  There are two reasons for this:

         1) Cselib has an idea of how many pseudos there are and
         that does not include the new ones we just added.

         2) Cselib does not know about the move insn we added
         above the store_info, and there is no way to tell it
         about it, because it has "moved on".

         Problem (1) is fixable with a certain amount of engineering.
         Problem (2) requires starting the bb from scratch.  This
         could be expensive.

         So we are just going to have to lie.  The move/extraction
         insns are not really an issue, cselib did not see them.  But
         the use of the new pseudo read_insn is a real problem because
         cselib has not scanned this insn.  The way that we solve this
         problem is that we are just going to put the mem back for now
         and when we are finished with the block, we undo this.  We
         keep a table of mems to get rid of.  At the end of the basic
         block we can put them back.  
         Get rid of the read_info, from the point of view of the
         rest of dse, play like this read never happened.  

References add_wild_read(), insn_info::cannot_delete, dump_file, and dump_flags.

Referenced by check_mem_read_rtx().

static void reset_active_stores ( )
static

References get_address_mode().

Referenced by can_escape().

static unsigned int rest_of_handle_dse ( )
static
   -------------------------------------------------------------------------
   DSE
   ------------------------------------------------------------------------- 
   Callback for running pass_rtl_dse.  
     Need the notes since we must track live hardregs in the forwards
     direction.  
static void scan_insn ( )
static
   Apply record_store to all candidate stores in INSN.  Mark INSN
   if some part of it is not a candidate store and assigns to a
   non-register target.  
     Cselib clears the table for this case, so we have to essentially
     do the same.  
     Look at all of the uses in the insn.  
         Const functions cannot do anything bad i.e. read memory,
         however, they can read their parameters which may have
         been pushed onto the stack.
         memset and bzero don't read memory either.  
             See the head comment of the frame_read field.  
             Loop over the active stores and remove those which are
             killed by the const function call.  
                 The stack pointer based stores are always killed.  
                 If the frame is read, the frame related stores are killed.  
                     Skip the clobbers.  
           Every other call, including pure functions, may read any memory
           that is not relative to the frame.  
     Assuming that there are sets in these insns, we cannot delete
     them.  
     If we found some sets of mems, add it into the active_local_stores so
     that it can be locally deleted if found dead or used for
     replace_read and redundant constant store elimination.  Otherwise mark
     it as cannot delete.  This simplifies the processing later.  
static void scan_reads_nospill ( )
static
   Process the READ_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL.  
     If this insn reads the frame, kill all the frame related stores.  
         Kill all non-frame related stores.  Kill all stores of variables that
         escape.  
                         Begin > end for block mode reads.  
                         The groups are the same, just process the
                         offsets.  
                     The groups are different, if the alias sets
                     conflict, clear the entire group.  We only need
                     to apply this test if the read_info is a cselib
                     read.  Anything with a constant base cannot alias
                     something else with a different constant
                     base.  

References bitmap_ior_into(), group_info::frame_related, bb_info::gen, group_info::group_kill, group_info::process_globally, and stores_off_frame_dead_at_return.

Referenced by scan_stores_spill().

static void scan_reads_spill ( )
static
   Process the READ_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL.  

References bitmap_copy(), and bb_info::out.

Referenced by scan_stores_spill().

static void scan_stores_nospill ( )
static
   Process the STORE_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL. 

References bitmap_clear(), find_insn_before_first_wild_read(), basic_block_def::index, bb_info::kill, and bb_info::last_insn.

Referenced by dse_step3(), and scan_stores_spill().

static void scan_stores_spill ( )
static
   Process the STORE_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL. 

References bb_info::gen, insn_info::insn, bb_info::kill, insn_info::prev_insn, insn_info::read_rec, scan_reads_nospill(), scan_reads_spill(), scan_stores_nospill(), and insn_info::store_rec.

static void set_all_positions_unneeded ( )
inline static
   Mark the whole store S_INFO as unneeded.  

References insn_info::cannot_delete, and clear_rhs_from_active_local_stores().

static void set_position_unneeded ( )
inline static
   Mark byte POS bytes from the beginning of store S_INFO as unneeded.  

References add_wild_read(), insn_info::cannot_delete, dump_file, and dump_flags.

static void set_usage_bits ( group_info_t  group,
HOST_WIDE_INT  offset,
HOST_WIDE_INT  width,
tree  expr 
)
static
   Set the store* bitmaps offset_map_size* fields in GROUP based on
   OFFSET and WIDTH.  

Variable Documentation

insn_info_t active_local_stores
static
   The linked list of stores that are under consideration in this
   basic block.  
int active_local_stores_len
static
bitmap all_blocks
static
alloc_pool bb_info_pool
static
bb_info_t* bb_table
static
   Table to hold all bb_infos.  
group_info_t clear_alias_group
static
   The group that holds all of the clear_alias_sets.  
htab_t clear_alias_mode_table
static
   The modes of the clear_alias_sets.  
alloc_pool cse_store_info_pool
static
unsigned int current_position
static
   The number of bits used in the global bitmaps.  
deferred_change_t deferred_change_list = NULL
static
alloc_pool deferred_change_pool
static
bitmap_obstack dse_bitmap_obstack
static
   Obstack for the DSE dataflow bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large
   (~2GB for the small (!) test case of PR54146) and we'll hold on to
   all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  
struct obstack dse_obstack
static
   Obstack for other data.  As for above: Kinda nice to be able to
   throw it all away at the end in one big sweep.  
int globally_deleted
static
   Counter for stats.  

Referenced by dse_step3(), and dse_step4().

alloc_pool insn_info_pool
static
bitmap kill_on_calls
static
   Locations that are killed by calls in the global phase.  
int locally_deleted
static

Referenced by dse_step4().

alloc_pool read_info_pool
static
alloc_pool rtx_group_info_pool
static
int rtx_group_next_id
static
   Index into the rtx_group_vec.  
hash_table<invariant_group_base_hasher> rtx_group_table
static
   Tables of group_info structures, hashed by base value.  
vec<group_info_t> rtx_group_vec
static
alloc_pool rtx_store_info_pool
static
bitmap scratch = NULL
static
   Scratch bitmap for cselib's cselib_expand_value_rtx.  

Referenced by return_insn_p().

int spill_deleted
static

Referenced by dse_step4().

bool stores_off_frame_dead_at_return
static
   This is true except if cfun->stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  

Referenced by scan_reads_nospill().