GCC Middle and Back End API Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "regset.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "alias.h"
#include "insn-config.h"
#include "expr.h"
#include "recog.h"
#include "optabs.h"
#include "dbgcnt.h"
#include "target.h"
#include "params.h"
#include "gimple.h"
#include "gimple-ssa.h"
Data Structures
struct store_info
struct read_info
struct insn_info
struct bb_info
struct group_info
struct deferred_change
struct clear_alias_mode_holder
struct invariant_group_base_hasher
struct note_add_store_info
Macros
#define MAX_OFFSET (64 * 1024)
Typedefs
typedef struct store_info *store_info_t
typedef struct read_info *read_info_t
typedef struct insn_info *insn_info_t
typedef struct bb_info *bb_info_t
typedef struct group_info *group_info_t
typedef struct group_info *const_group_info_t
typedef struct deferred_change *deferred_change_t
Variables
static bitmap_obstack dse_bitmap_obstack
static struct obstack dse_obstack
static bitmap scratch = NULL
static alloc_pool cse_store_info_pool
static alloc_pool rtx_store_info_pool
static alloc_pool read_info_pool
static alloc_pool insn_info_pool
static insn_info_t active_local_stores
static int active_local_stores_len
static alloc_pool bb_info_pool
static bb_info_t *bb_table
static alloc_pool rtx_group_info_pool
static int rtx_group_next_id
static vec<group_info_t> rtx_group_vec
static alloc_pool deferred_change_pool
static deferred_change_t deferred_change_list = NULL
static group_info_t clear_alias_group
static htab_t clear_alias_mode_table
static bool stores_off_frame_dead_at_return
static int globally_deleted
static int locally_deleted
static int spill_deleted
static bitmap all_blocks
static bitmap kill_on_calls
static unsigned int current_position
static hash_table<invariant_group_base_hasher> rtx_group_table
#define MAX_OFFSET (64 * 1024)
RTL dead store elimination. Copyright (C) 2005-2013 Free Software Foundation, Inc.
Contributed by Richard Sandiford <rsandifo@codesourcery.com> and Kenneth Zadeck <zadeck@naturalbridge.com>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see http://www.gnu.org/licenses/. This file contains three techniques for performing Dead Store Elimination (dse).
The first technique performs dse locally on any base address. It is based on cselib, which is a local value numbering technique. This technique is local to a basic block but deals with fairly general addresses.
The second technique performs dse globally but is restricted to base addresses that are either constant or are relative to the frame_pointer.
The third technique (which is only done after register allocation) processes the spill slots. This differs from the second technique because it takes advantage of the fact that spilling is completely free from the effects of aliasing.
Logically, dse is a backwards dataflow problem. A store can be deleted if it cannot be reached in the backward direction by any use of the value being stored. However, the local technique uses a forwards scan of the basic block because cselib requires that the block be processed in that order.
The pass is logically broken into 7 steps:
0) Initialization.
1) The local algorithm, as well as scanning the insns for the two global algorithms.
2) Analysis to see if the global algs are necessary. In the case of stores based on a constant address, there must be at least two stores to that address to make it possible to delete some of the stores. In the case of stores off of the frame or spill related stores, only one store to an address is necessary because those stores die at the end of the function.
3) Set up the global dataflow equations based on processing the info parsed in the first step.
4) Solve the dataflow equations.
5) Delete the insns that the global analysis has indicated are unnecessary.
6) Delete insns that store the same value as a preceding store where the earlier store couldn't be eliminated.
7) Cleanup.
This step uses cselib and canon_rtx to build the largest expression possible for each address. This pass is a forwards pass through each basic block. From the point of view of the global technique, the first pass could examine a block in either direction. The forwards ordering is to accommodate cselib.
We make a simplifying assumption: addresses fall into four broad categories:
1) base has rtx_varies_p == false, offset is constant.
2) base has rtx_varies_p == false, offset variable.
3) base has rtx_varies_p == true, offset constant.
4) base has rtx_varies_p == true, offset variable.
The local passes are able to process all 4 kinds of addresses. The global pass only handles 1).
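For illustration (schematic RTL only; the exact address shapes vary by target), the four kinds look like:

  (mem (plus (symbol_ref "a") (const_int 4)))   1) constant base, constant offset
  (mem (plus (symbol_ref "a") (reg 100)))       2) constant base, variable offset
  (mem (plus (reg 100) (const_int 4)))          3) varying base, constant offset
  (mem (plus (reg 100) (reg 101)))              4) varying base, variable offset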
The global problem is formulated as follows:
A store, S1, to address A, where A is not relative to the stack frame, can be eliminated if all paths from S1 to the end of the function contain another store to A before a read to A.
If the address A is relative to the stack frame, a store S2 to A can be eliminated if there are no paths from S2 that reach the end of the function and read A before another store to A. In this case S2 can be deleted if there are paths from S2 to the end of the function that have no reads or writes to A. This second case allows stores to the stack frame to be deleted that would otherwise die when the function returns. This cannot be done when stores_off_frame_dead_at_return is false; see the documentation of that variable for the cases in which it is false.
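As a hypothetical C illustration (ours, not from the GCC testsuite) of both cases:

  int g;

  void
  f (void)
  {
    int local;
    local = 23;   /* Frame store: dead, since no path reads it before return.  */
    g = 1;        /* Dead: every path stores to g again before any read.  */
    g = 2;        /* Live: callers of f may read g.  */
  }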
The global problem is formulated as a backwards set union dataflow problem where the stores are the gens and reads are the kills. Set union problems are rare and require some special handling given our representation of bitmaps. A straightforward implementation requires a lot of bitmaps filled with 1s. These are expensive and cumbersome in our bitmap formulation so care has been taken to avoid large vectors filled with 1s. See the comments in bb_info and in the dataflow confluence functions for details.
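Schematically, in our own notation (not the pass's identifiers), with GEN(bb) the positions stored in bb and KILL(bb) the positions read in bb:

  IN(bb)  = GEN(bb) U (OUT(bb) - KILL(bb))
  OUT(bb) = intersection of IN(S) over the successors S of bb

where a missing set stands for all ones, as the confluence and transfer function entries below explain. Roughly, a store is then deletable when its position is in the set flowing backwards into the point just below it, i.e. every path from there stores the position again before reading it.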
There are two places for further enhancements to this algorithm:
1) The original dse which was embedded in a pass called flow also did local address forwarding. For example in
  A <- r100
  ...
  ... <- A
flow would replace the right hand side of the second insn with a reference to r100. Most of the information needed to add this to this pass is already available. It has not been done because it is a lot of work in the case that either r100 is assigned to between the first and second insn and/or the second insn is a load of part of the value stored by the first insn.
insn 5 in gcc.c-torture/compile/990203-1.c simple case.
insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
2) The cleaning up of spill code is quite profitable. It currently depends on reading tea leaves and chicken entrails left by reload. This pass depends on reload creating a singleton alias set for each spill slot and telling the next dse pass which of these alias sets are the singletons. Rather than analyze the addresses of the spills, dse's spill processing just does analysis of the loads and stores that use those alias sets. There are three cases where this falls short:
a) Reload sometimes creates the slot for one mode of access, and then inserts loads and/or stores for a smaller mode. In this case, the current code just punts on the slot. The proper thing to do is to back out and use one bit vector position for each byte of the entity associated with the slot. This depends on KNOWING that reload always generates the accesses for each of the bytes in some canonical (read that as: easy to understand several passes after reload happens) way.
b) Reload sometimes decides that the spill slot it allocated was not large enough for the mode and goes back and allocates more slots with the same mode and alias set. The backout in this case is a little more graceful than (a). In this case the slot is unmarked as being a spill slot and if the final address comes out to be based off the frame pointer, the global algorithm handles this slot.
c) For any pass that may prespill, there is currently no mechanism to tell the dse pass that the slot being used has the special properties that reload uses. It may be that all that is required is to have those passes make the same calls that reload does, assuming that the alias sets can be manipulated in the same way.

There are limits to the size of constant offsets we model for the global problem (MAX_OFFSET). There are certainly test cases that exceed this limit; however, it is unlikely that there are important programs that really have constant offsets this size.
static
Set the BB_INFO so that the last insn is marked as a wild read of non-frame locations.
static
Set the BB_INFO so that the last insn is marked as a wild read.
Referenced by replace_read(), and set_position_unneeded().
inline static
Return TRUE if all bytes START through START+WIDTH-1 from S_INFO store are needed.
Referenced by check_mem_read_rtx().
inline static
Return TRUE if any bytes from S_INFO store are needed.
static
Return whether EXPR can possibly escape the current function scope.
References free_read_records(), bb_info::last_insn, reset_active_stores(), and insn_info::wild_read.
static
Take all reasonable action to put the address of MEM into the form that we can do analysis on.
The gold standard is to get the address into the form: address + OFFSET where address is something that rtx_varies_p considers a constant. When we can get the address in this form, we can do global analysis on it. Note that for constant bases, address is not actually returned, only the group_id. The address can be obtained from that.
If that fails, we try cselib to get a value we can at least use locally. If that fails we return false.
The GROUP_ID is set to -1 for cselib bases and the index of the group for non_varying bases.
FOR_READ is true if this is a mem read and false if not.
First see if just canon_rtx (mem_address) is const or frame, if not, try cselib_expand_value_rtx and call canon_rtx on that.
Use cselib to replace all of the reg references with the full expression. This will take care of the case where we have r_x = base + offset; val = *r_x; by making it into val = *(base + offset);
If this fails, just go with the address from first iteration.
Split the address into canonical BASE + OFFSET terms.
bool check_for_inc_dec ()
Entry point for postreload. If you work on reload_cse, or you need this anywhere else, consider if you can provide register liveness information and add a parameter to this function so that it can be passed down in insn_info.fixed_regs_live.
static
Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it is there, is split into a separate insn. Return true on success (or if there was nothing to do), false on failure.
Referenced by dse_step3().
static
A for_each_rtx callback in which DATA is the bb_info. Check to see if LOC is a mem and, if it is, look at the address and kill any appropriate stores that may be active.
If it is reading readonly mem, then there can be no conflict with another write.
For alias_set != 0 canon_true_dependence should be never called.
We ignore the clobbers in store_info. This is mildly aggressive, but there really should not be a clobber followed by a read.
Skip the clobbers.
This is the restricted case where the base is a constant or the frame pointer and offset is a constant.
Skip the clobbers.
There are three cases here.
We have a cselib store followed by a read from a const base.
This is a block mode load. We may get lucky and canon_true_dependence may save the day.
If this read is just reading back something that we just stored, rewrite the read.
The bases are the same, just see if the offsets overlap.
The else case that is missing here is that the bases are constant but different. There is nothing to do here because there is no overlap.
Skip the clobbers.
If this read is just reading back something that we just stored, rewrite the read.
References all_positions_needed_p(), store_info::begin, bb_info::regs_live, replace_read(), and store_info::rhs.
static
A for_each_rtx callback in which DATA points to the INSN_INFO, as for check_mem_read_rtx. Nullify the pointer if i_m_r_m_r returns true for any part of *LOC.
References store_info::is_set, store_info::next, and insn_info::store_rec.
static
Find the entry associated with ALIAS_SET.
static
Clear the rhs field from the active_local_stores array.
Skip the clobbers.
Referenced by set_all_positions_unneeded().
static
Return true if X is a constant or one of the registers that behave as a constant over the life of a function. This is equivalent to !rtx_varies_p for memory addresses.
Note that we have to test for the actual rtx used for the frame and arg pointers and not just the register number in case we have eliminated the frame and/or arg pointer and are using it for pseudos.
The arg pointer varies if it is not a fixed register.
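A minimal sketch of such a predicate, close in spirit to the pass but not guaranteed to be its exact code:

  static bool
  const_or_frame_p (rtx x)
  {
    if (CONSTANT_P (x))
      return true;

    if (GET_CODE (x) == REG)
      {
        /* Test the rtx itself, not the register number, because of
           frame/arg pointer elimination (see the note above).  */
        if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
            /* The arg pointer varies if it is not a fixed register.  */
            || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
            || x == pic_offset_table_rtx)
          return true;
      }

    return false;
  }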
static
Return a bitmap of the fixed registers contained in IN.
static
Delete the insn and free all of the fields inside INSN_INFO.
References bitmap_set_bit, group_info::escaped_n, group_info::escaped_p, group_info::offset_map_size_n, group_info::offset_map_size_p, group_info::store1_n, group_info::store1_p, group_info::store2_n, and group_info::store2_p.
Referenced by dse_transfer_function().
static
Confluence function for blocks with no successors. Create an out set from the gen set of the exit block. This block logically has the exit block as a successor.
References bitmap_clear(), dump_file, and dump_flags.
static
Propagate the information from the in set of the dest of E to the out set of the src of E. If the various in or out sets are not there, that means they are all ones.
static
Initialization of data structures.
Referenced by dse_step4().
static
Do all of step 1.
Scan the insns.
This is something of a hack, because the global algorithm is supposed to take care of the case where stores go dead at the end of the function. However, the global algorithm must take a more conservative view of block mode reads than the local alg does. So to get the case where you have a store to the frame followed by a non-overlapping block mode read, we look at the active local stores at the end of the function and delete all of the frame and spill based ones.
Skip the clobbers.
Get rid of the loads that were discovered in replace_read. Cselib is finished with this block.
There is no reason to validate this change. That was done earlier.
Get rid of all of the cselib based store_infos in this block and mark the containing insns as not being deletable.
Free at least positions_needed bitmaps.
Referenced by dse_step4().
static
For all non stack related bases, we only consider a store to be deletable if there are two or more stores for that position. This is because it takes one store to make the other store redundant. However, for the stores that are stack related, we consider them if there is only one store for the position. We do this because the stack related stores can be deleted if there is no read between them and the end of the function.
To make this work in the current framework, we take the stack related bases and add all of the bits from store1 into store2. This has the effect of making them eligible even if there is only one store.
References bitmap_and_compl_into(), bitmap_ior_into(), FOR_EACH_VEC_ELT, group_info::frame_related, group_info::group_kill, and group_info::process_globally.
Referenced by dse_step4().
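A hedged sketch of that store1-into-store2 folding, with field names taken from the group_info references above (not the verbatim source):

  /* Make frame related positions eligible even with a single store by
     folding the one-store bits into the two-or-more-store bits.  */
  if (stores_off_frame_dead_at_return && group->frame_related)
    {
      bitmap_ior_into (group->store2_n, group->store1_n);
      bitmap_ior_into (group->store2_p, group->store1_p);
    }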
static
Init the offset tables for the normal case.
Position 0 is unused because 0 is used in the maps to mean unused.
References bitmap_and_compl_into(), bitmap_ior_into(), and group_info::group_kill.
Referenced by dse_step4().
static
Build the transfer functions for the function.
If this is the second time dataflow is run, delete the old sets.
For any block in an infinite loop, we must initialize the out set to all ones. This could be expensive, but almost never occurs in practice. However, it is common in regression tests.
References store_info::alias_set, store_info::begin, bitmap_bit_p, bitmap_empty_p(), bitmap_print(), insn_info::cannot_delete, check_for_inc_dec_1(), dbg_cnt(), delete_insn(), deleted, dump_file, dump_flags, FOR_EACH_BB, get_bitmap_index(), globally_deleted, store_info::group_id, HOST_WIDE_INT, basic_block_def::index, insn_info::insn, INSN_P, INSN_UID, store_info::is_set, bb_info::last_insn, store_info::next, NULL, bb_info::out, scan_stores_nospill(), and insn_info::store_rec.
Referenced by dse_step4().
static
Set the gen set of the exit block, and also any block with no successors that does not have a wild read.
The gen set is all 0's for the exit block except for the frame_pointer_group.
static
Scan the insns in BB_INFO starting at PTR and going to the top of the block in order to build the gen and kill sets for the block. We start at ptr which may be the last insn in the block or may be the first insn with a wild read. In the latter case we are able to skip the rest of the block because it just does not matter: anything that happens is hidden by the wild read.
There are no wild reads in the spill case.
In the spill case or in the no_spill case if there is no wild read in the block, we will need a kill set.
There may have been code deleted by the dce pass run before this phase.
Process the read(s) last.
static
Solve the dataflow equations.
References df_analyze(), DF_DEFER_INSN_RESCAN, DF_LR_RUN_DCE, df_note_add_problem(), df_set_flags(), dse_step0(), dse_step1(), dse_step2_init(), dse_step2_nospill(), dse_step3(), dse_step5_nospill(), dse_step6(), dse_step7(), dump_file, dump_flags, globally_deleted, locally_deleted, and spill_deleted.
static
There may have been code deleted by the dce pass run before this phase.
Try to delete the current insn.
Skip the clobbers.
We do want to process the local info if the insn was deleted. For instance, if the insn did a wild read, we no longer need to trash the info.
References dbg_cnt().
Referenced by dse_step4().
static
There may have been code deleted by the dce pass run before this phase.
Referenced by dse_step4().
static
Referenced by dse_step4().
static
Propagate the info from the out to the in set of BB_INDEX's basic block. There are three cases:
1) The block has no kill set. In this case the kill set is all ones. It does not matter what the out set of the block is, none of the info can reach the top. The only thing that reaches the top is the gen set and we just copy the set.
2) There is a kill set but no out set and bb has successors. In this case we just return. Eventually an out set will be created and it is better to wait than to create a set of ones.
3) There is both a kill and out set. We apply the obvious transfer function.
Case 3 above.
Case 2 above.
Case 1 above. If there is already an in set, nothing happens.
References delete_dead_store_insn(), dump_file, dump_flags, insn_info::insn, INSN_UID, and store_info::redundant_reason.
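In pseudocode (ours, not the verbatim source), the three cases are:

  if (bb_info->kill == NULL)              /* Case 1: kill set is all ones.  */
    copy gen into in, unless in already exists;
  else if (bb_info->out == NULL && bb has successors)
    return;                               /* Case 2: wait for an out set.  */
  else                                    /* Case 3: the obvious transfer.  */
    in = gen U (out - kill);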
static
References bitmap_set_range(), GET_MODE, HARD_REGISTER_P, hard_regno_nregs, REG_P, REGNO, and regs_set.
static
Callback for for_each_inc_dec that emits an INSN that sets DEST to SRC + SRCOFF before insn ARG.
We can reuse all operands without copying, because we are about to delete the insn that contained it.
If a failure was flagged above, return 1 so that for_each_inc_dec will return it immediately, communicating the failure to its caller.
static
Return the insn in BB_INFO before the first wild read or if there are no wild reads in the block, return the last insn.
Block starts with wild read.
References BITMAP_ALLOC, bitmap_copy(), EXIT_BLOCK, bb_info::gen, basic_block_def::index, and bb_info::out.
Referenced by scan_stores_nospill().
static
If the modes are different and the value's source and target do not line up, we need to extract the value from the lower part of the rhs of the store, shift it, and then put it into a form that can be shoved into the read_insn. This function generates a right SHIFT of a value that is at least ACCESS_SIZE bytes wide, in READ_MODE. The shift sequence is returned, or NULL if we failed to find a shift.
Some machines like the x86 have shift insns for each size of operand. Other machines like the ppc or the ia-64 may only have shift insns that shift values within 32 or 64 bit registers. This loop tries to find the smallest shift insn that will right justify the value we want to read but is available in one insn on the machine.
If a constant was stored into memory, try to simplify it here, otherwise the cost of the shift might preclude this optimization e.g. at -Os, even when no actual shift will be needed.
Try a wider mode if truncating the store mode to NEW_MODE requires a real instruction.
Also try a wider mode if the necessary punning is either not desirable or not possible.
In theory we could also check for an ashr. Ian Taylor knows of one dsp where the cost of these two was not the same. But this really is a rare case anyway.
The computation up to here is essentially independent of the arguments and could be precomputed. It may not be worth doing so. We could precompute if worthwhile or at least cache the results. The result technically depends on both SHIFT and ACCESS_SIZE, but in practice the answer will depend only on ACCESS_SIZE.
We found an acceptable shift. Generate a move to take the value from the store and put it into the shift pseudo, then shift it, then generate another move to put it into the target of the read.
References store_info::begin, BITS_PER_UNIT, const0_rtx, CONST_INT_P, store_info::const_rhs, CONSTANT_P, copy_rtx(), store_info::end, extract_low_bits(), gcc_assert, gen_int_mode(), GET_MODE, GET_MODE_BITSIZE, GET_MODE_CLASS, GET_MODE_SIZE, HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT, int_mode_for_mode(), INTVAL, store_info::mem, NULL_RTX, optimize_bb_for_speed_p(), store_info::rhs, and shift.
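A hypothetical C fragment showing when such a shift is needed (little-endian layout assumed):

  union { unsigned int i; unsigned short s[2]; } u;

  u.i = x;          /* 4 byte store.  */
  ... = u.s[1];     /* 2 byte read of the upper half: the forwarded value
                       is (x >> 16), so the replacement needs a right
                       shift sequence.  */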
static
Free all READ_REC of the LAST_INSN of BB_INFO.
Referenced by can_escape().
static
Delete all of the store_info recs from INSN_INFO.
static
Look up the bitmap index for OFFSET in GROUP_INFO. If it is not there, return 0.
References NULL, insn_info::prev_insn, and insn_info::wild_read.
Referenced by dse_step3().
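A sketch of the lookup under the convention above (field names assumed from group_info; 0 means no position was assigned):

  static int
  get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
  {
    if (offset < 0)
      {
        HOST_WIDE_INT offset_p = -offset;
        if (offset_p >= group_info->offset_map_size_n)
          return 0;
        return group_info->offset_map_n[offset_p];
      }
    if (offset >= group_info->offset_map_size_p)
      return 0;
    return group_info->offset_map_p[offset];
  }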
static
Get arguments passed to CALL_INSN. Return TRUE if successful. So far it only handles arguments passed in registers.
static
Get the GROUP for BASE. Add a new group if it is not there.
Find the store_base_info structure for BASE, creating a new one if necessary.
static
Helper function for replace_read and record_store. Attempt to return a value stored in STORE_INFO, from READ_BEGIN to one before READ_END bytes read in READ_MODE. Return NULL if not successful. If REQUIRE_CST is true, return always constant.
To get here the read is within the boundaries of the write so shift will never be negative. Start out with the shift being in bytes.
From now on it is bits.
The store is a memset (addr, const_val, const_size).
References BITMAP_ALLOC, bitmap_and_into(), bitmap_empty_p(), BITMAP_FREE, df_print_regset(), dump_file, dump_flags, look_for_hardregs(), NEXT_INSN, note_stores(), NULL_RTX, PATTERN, reg_obstack, and regs_set.
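A worked instance of the byte-then-bit computation (our numbers; little-endian assumed): if the store covers bytes [0, 8) and the read covers bytes [2, 6), then

  shift = (read start - store start) = 2 bytes = 16 bits

and the value returned is the 4 byte low part of (stored value >> 16).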
static
Return whether DECL, a local variable, can possibly escape the current function scope.
If this is a partitioned variable, we need to consider all the variables in the partition. This is necessary because a store into one of them can be replaced with a store into another and this may not change the outcome of the escape analysis.
static
Call back for note_stores to find the hard regs set or clobbered by insn. Data is a bitmap of the hardregs set so far.
Referenced by get_stored_val().
static
Return a bitmask with the first N low bits set.
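A minimal sketch of such a helper (ours, assuming 0 < n <= HOST_BITS_PER_WIDE_INT):

  static unsigned HOST_WIDE_INT
  lowpart_bitmask (int n)
  {
    unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
    return mask >> (HOST_BITS_PER_WIDE_INT - n);
  }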
rtl_opt_pass *make_pass_rtl_dse1 ()
rtl_opt_pass *make_pass_rtl_dse2 ()
static
Find all of the blocks that are not backwards reachable from the exit block or any block with no successors (BB). These are the infinite loops or infinite self loops. These blocks will still have their bits set in UNREACHABLE_BLOCKS.
static
Callback for emit_inc_dec_insn_before via note_stores. Check if a register is clobbered which is live afterwards.
If this register is referenced by the current or an earlier insn, that's OK. E.g. this applies to the register that is being incremented with this addition.
If we come here, we have a clobber of a register that's only OK if that register is not live. If we don't have liveness information available, fail now.
Now check if this is a live fixed register.
static
BODY is an instruction pattern that belongs to INSN. Return 1 if there is a candidate store, after adding it to the appropriate local store group if so.
If this is not used, then this cannot be used to keep the insn from being deleted. On the other hand, it does provide something that can be used to prove that another store is dead.
Check whether that value is a suitable memory location.
If the set or clobber is unused, then it does not affect our ability to get rid of the entire insn.
At this point we know mem is a mem.
Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0)) as memset (addr, 0, 36);
If the set or clobber is unused, then it does not affect our ability to get rid of the entire insn.
We can still process a volatile mem, we just cannot delete it.
In the restrictive case where the base is a constant or the frame pointer we can do global analysis.
No place to keep the value after ra.
Sometimes the store and reload is used for truncation and rounding.
Check to see if this store causes some other stores to be dead.
For alias_set != 0 canon_true_dependence should be never called.
Skip the clobbers. We delete the active insn if this insn shadows the set. To have been put on the active list, it has exactly one set.
Generally, spills cannot be processed if any of the references to the slot have a different mode. But if we are in the same block and the mode is exactly the same between this store and one before in the same block, we can still delete it.
Even if PTR won't be eliminated as unneeded, if both PTR and this insn store the same constant value, we might eliminate this insn instead.
Need to see if it is possible for this store to overwrite the value of store_info. If it is, set the rhs to NULL to keep it from being used to remove a load.
An insn can be deleted if every position of every one of its s_infos is zero.
Finish filling in the store_info.
If this is a clobber, we return 0. We will only be able to delete this insn if there is only one USED store, but we can use the clobber to delete other stores earlier.
References insn_info::contains_cselib_groups, dump_file, dump_flags, may_be_sp_based_p(), offset, pool_alloc(), insn_info::stack_pointer_based, and XEXP.
static
Remove BASE from the set of active_local_stores. This is a callback from cselib that is used to get rid of the stores in active_local_stores.
If ANY of the store_infos match the cselib group that is being deleted, then the insn cannot be deleted.
static
Take a sequence of:

  A <- r1
  ...
  ... <- A

and change it into

  r2 <- r1
  A <- r1
  ...
  ... <- r2

or

  r3 <- extract (r1)
  r3 <- r3 >> shift
  r2 <- extract (r3)
  ... <- r2

or

  r2 <- extract (r1)
  ... <- r2

depending on the alignment and the mode of the store and subsequent load.
The STORE_INFO and STORE_INSN are for the store and READ_INFO and READ_INSN are for the read. Return true if the replacement went ok.
Create a sequence of instructions to set up the read register. This sequence goes immediately before the store and its result is read by the load. We need to keep this in perspective. We are replacing a read with a sequence of insns, but the read will almost certainly be in cache, so it is not going to be an expensive one. Thus, we are not willing to do a multi insn shift or worse a subroutine call to get rid of the read.
Force the value into a new register so that it won't be clobbered between the store and the load.
Now we have to scan the set of new instructions to see if the sequence contains any sets of hardregs that happened to be live at this point. For instance, this can happen if one of the insns sets the CC and the CC happened to be live at that point. This does occasionally happen, see PR 37922.
Insert this right before the store insn where it will be safe from later insns that might change it before the read.
And now for the kludge part: cselib croaks if you just return at this point. There are two reasons for this: 1) Cselib has an idea of how many pseudos there are and that does not include the new ones we just added. 2) Cselib does not know about the move insn we added above the store_info, and there is no way to tell it about it, because it has "moved on". Problem (1) is fixable with a certain amount of engineering. Problem (2) requires starting the bb from scratch. This could be expensive. So we are just going to have to lie. The move/extraction insns are not really an issue, cselib did not see them. But the use of the new pseudo read_insn is a real problem because cselib has not scanned this insn. The way that we solve this problem is that we are just going to put the mem back for now and when we are finished with the block, we undo this. We keep a table of mems to get rid of. At the end of the basic block we can put them back.
Get rid of the read_info, from the point of view of the rest of dse, play like this read never happened.
References add_wild_read(), insn_info::cannot_delete, dump_file, and dump_flags.
Referenced by check_mem_read_rtx().
static
References get_address_mode(), and XEXP.
Referenced by can_escape().
static
Need the notes since we must track live hardregs in the forwards direction.
static
Apply record_store to all candidate stores in INSN. Mark INSN if some part of it is not a candidate store and assigns to a non-register target.
Cselib clears the table for this case, so we have to essentially do the same.
Look at all of the uses in the insn.
Const functions cannot do anything bad, i.e. read memory; however, they can read their parameters, which may have been pushed onto the stack. memset and bzero don't read memory either.
See the head comment of the frame_read field.
Loop over the active stores and remove those which are killed by the const function call.
The stack pointer based stores are always killed.
If the frame is read, the frame related stores are killed.
Skip the clobbers.
Every other call, including pure functions, may read any memory that is not relative to the frame.
Assuming that there are sets in these insns, we cannot delete them.
If we found some sets of mems, add it into the active_local_stores so that it can be locally deleted if found dead or used for replace_read and redundant constant store elimination. Otherwise mark it as cannot delete. This simplifies the processing later.
static
Process the READ_INFOs into the GEN and KILL bitmaps. KILL may be NULL.
If this insn reads the frame, kill all the frame related stores.
Kill all non-frame related stores. Kill all stores of variables that escape.
Begin > end for block mode reads.
The groups are the same, just process the offsets.
The groups are different, if the alias sets conflict, clear the entire group. We only need to apply this test if the read_info is a cselib read. Anything with a constant base cannot alias something else with a different constant base.
References bitmap_ior_into(), FOR_EACH_VEC_ELT, group_info::frame_related, bb_info::gen, group_info::group_kill, group_info::process_globally, and stores_off_frame_dead_at_return.
Referenced by scan_stores_spill().
static
Process the READ_INFOs into the GEN and KILL bitmaps. KILL may be NULL.
References BITMAP_ALLOC, bitmap_copy(), and bb_info::out.
Referenced by scan_stores_spill().
static
Process the STORE_INFOs into the GEN and KILL bitmaps. KILL may be NULL.
References BITMAP_ALLOC, bitmap_clear(), BITMAP_FREE, find_insn_before_first_wild_read(), basic_block_def::index, bb_info::kill, and bb_info::last_insn.
Referenced by dse_step3(), and scan_stores_spill().
static
Process the STORE_INFOs into the GEN and KILL bitmaps. KILL may be NULL.
References bb_info::gen, insn_info::insn, INSN_P, bb_info::kill, insn_info::prev_insn, insn_info::read_rec, scan_reads_nospill(), scan_reads_spill(), scan_stores_nospill(), and insn_info::store_rec.
inline static
Mark the whole store S_INFO as unneeded.
References insn_info::cannot_delete, and clear_rhs_from_active_local_stores().
inline static
Mark byte POS bytes from the beginning of store S_INFO as unneeded.
References add_wild_read(), insn_info::cannot_delete, dump_file, and dump_flags.
static
Set the store* bitmaps offset_map_size* fields in GROUP based on OFFSET and WIDTH.
References arg_pointer_rtx, CONSTANT_P, fixed_regs, frame_pointer_rtx, GET_CODE, hard_frame_pointer_rtx, and pic_offset_table_rtx.
static
The linked list of stores that are under consideration in this basic block.
static
Referenced by setjmp_args_warning(), and setjmp_vars_warning().
static
Table to hold all bb_infos.
static
The group that holds all of the clear_alias_sets.
static
The modes of the clear_alias_sets.
static
The number of bits used in the global bitmaps.
static
Obstack for the DSE dataflow bitmaps. We don't want to put these on the default obstack because these bitmaps can grow quite large (~2GB for the small (!) test case of PR54146) and we'll hold on to all that memory until the end of the compiler run. As a bonus, delete_tree_live_info can destroy all the bitmaps by just releasing the whole obstack.
static
Obstack for other data. As for above: Kinda nice to be able to throw it all away at the end in one big sweep.
static
Counter for stats.
Referenced by dse_step3(), and dse_step4().
static
Locations that are killed by calls in the global phase.
static
Referenced by dse_step4().
static
Index into the rtx_group_vec.
static
Tables of group_info structures, hashed by base value.
static
Scratch bitmap for cselib's cselib_expand_value_rtx.
Referenced by return_insn_p().
static
Referenced by dse_step4().
static
This is true except if cfun->stdarg -- i.e. we cannot do this for vararg functions because they play games with the frame.
Referenced by scan_reads_nospill().