Functions |
static void | set_of_1 (rtx, const_rtx, void *) |
static bool | covers_regno_p (const_rtx, unsigned int) |
static bool | covers_regno_no_parallel_p (const_rtx, unsigned int) |
static int | rtx_referenced_p_1 (rtx *, void *) |
static int | computed_jump_p_1 (const_rtx) |
static void | parms_set (rtx, const_rtx, void *) |
static unsigned HOST_WIDE_INT | cached_nonzero_bits (const_rtx, enum machine_mode, const_rtx, enum machine_mode, unsigned HOST_WIDE_INT) |
static unsigned HOST_WIDE_INT | nonzero_bits1 (const_rtx, enum machine_mode, const_rtx, enum machine_mode, unsigned HOST_WIDE_INT) |
static unsigned int | cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx, enum machine_mode, unsigned int) |
static unsigned int | num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx, enum machine_mode, unsigned int) |
int | rtx_unstable_p () |
bool | rtx_varies_p () |
static int | rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size, enum machine_mode mode, bool unaligned_mems) |
int | rtx_addr_can_trap_p () |
bool | nonzero_address_p () |
bool | rtx_addr_varies_p () |
rtx | get_call_rtx_from () |
HOST_WIDE_INT | get_integer_term () |
rtx | get_related_value () |
bool | offset_within_block_p () |
void | split_const () |
int | count_occurrences () |
bool | unsigned_reg_p () |
int | reg_mentioned_p () |
int | no_labels_between_p () |
int | reg_used_between_p () |
int | reg_referenced_p () |
int | reg_set_between_p () |
int | reg_set_p () |
int | modified_between_p () |
int | modified_in_p () |
static void | set_of_1 () |
const_rtx | set_of () |
void | record_hard_reg_sets () |
void | find_all_hard_reg_sets () |
static int | record_hard_reg_uses_1 () |
void | record_hard_reg_uses () |
rtx | single_set_2 () |
int | multiple_sets () |
int | set_noop_p () |
int | noop_move_p () |
rtx | find_last_value () |
int | refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x, rtx *loc) |
int | reg_overlap_mentioned_p () |
void | note_stores (const_rtx x, void(*fun)(rtx, const_rtx, void *), void *data) |
void | note_uses (rtx *pbody, void(*fun)(rtx *, void *), void *data) |
int | dead_or_set_p () |
static bool | covers_regno_no_parallel_p () |
static bool | covers_regno_p () |
int | dead_or_set_regno_p () |
rtx | find_reg_note () |
rtx | find_regno_note () |
rtx | find_reg_equal_equiv_note () |
rtx | find_constant_src () |
int | find_reg_fusage () |
int | find_regno_fusage () |
rtx | alloc_reg_note () |
void | add_reg_note () |
void | remove_note () |
void | remove_reg_equal_equiv_notes () |
void | remove_reg_equal_equiv_notes_for_regno () |
int | in_expr_list_p () |
void | remove_node_from_expr_list () |
int | volatile_insn_p () |
int | volatile_refs_p () |
int | side_effects_p () |
int | may_trap_p_1 () |
int | may_trap_p () |
int | may_trap_or_fault_p () |
int | inequality_comparisons_p () |
rtx | replace_rtx () |
int | replace_label () |
static int | rtx_referenced_p_1 () |
int | rtx_referenced_p () |
bool | tablejump_p () |
static int | computed_jump_p_1 () |
int | computed_jump_p () |
static int | for_each_rtx_1 () |
int | for_each_rtx () |
static int | for_each_inc_dec_find_mem (rtx *r, void *d) |
static int | for_each_inc_dec_find_inc_dec () |
static int | for_each_inc_dec_find_mem () |
int | for_each_inc_dec (rtx *x, for_each_inc_dec_fn fn, void *arg) |
rtx | regno_use_in () |
int | commutative_operand_precedence () |
bool | swap_commutative_operands_p () |
int | auto_inc_p () |
int | loc_mentioned_in_p () |
unsigned int | subreg_lsb_1 (enum machine_mode outer_mode, enum machine_mode inner_mode, unsigned int subreg_byte) |
unsigned int | subreg_lsb () |
void | subreg_get_info (unsigned int xregno, enum machine_mode xmode, unsigned int offset, enum machine_mode ymode, struct subreg_info *info) |
unsigned int | subreg_regno_offset (unsigned int xregno, enum machine_mode xmode, unsigned int offset, enum machine_mode ymode) |
bool | subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode, unsigned int offset, enum machine_mode ymode) |
int | simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode, unsigned int offset, enum machine_mode ymode) |
unsigned int | subreg_regno () |
unsigned int | subreg_nregs () |
unsigned int | subreg_nregs_with_regno () |
static void | parms_set () |
rtx | find_first_parameter_load () |
bool | keep_with_call_p () |
bool | label_is_jump_target_p () |
int | rtx_cost () |
void | get_full_rtx_cost (rtx x, enum rtx_code outer, int opno, struct full_rtx_costs *c) |
int | address_cost () |
int | default_address_cost () |
unsigned HOST_WIDE_INT | nonzero_bits () |
unsigned int | num_sign_bit_copies () |
int | insn_rtx_cost () |
rtx | canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest, rtx want_reg, int allow_cc_mode, int valid_at_insn_p) |
rtx | get_condition () |
static void | init_num_sign_bit_copies_in_rep () |
bool | truncated_to_mode () |
void | init_rtlanal () |
bool | constant_pool_constant_p () |
int | low_bitmask_len () |
enum machine_mode | get_address_mode () |
void | split_double () |
rtx * | strip_address_mutations () |
static bool | must_be_base_p () |
static bool | must_be_index_p () |
static void | set_address_segment () |
static void | set_address_base () |
static void | set_address_index () |
static void | set_address_disp () |
static void | decompose_incdec_address () |
static void | decompose_automod_address () |
static rtx ** | extract_plus_operands () |
static int | baseness (rtx x, enum machine_mode mode, addr_space_t as, enum rtx_code outer_code, enum rtx_code index_code) |
static void | decompose_normal_address () |
void | decompose_address (struct address_info *info, rtx *loc, enum machine_mode mode, addr_space_t as, enum rtx_code outer_code) |
void | decompose_lea_address () |
void | decompose_mem_address () |
void | update_address () |
HOST_WIDE_INT | get_index_scale () |
enum rtx_code | get_index_code () |
Evaluate the likelihood of X being a base or index value, returning
positive if it is likely to be a base, negative if it is likely to be
an index, and 0 if we can't tell. Make the magnitude of the return
value reflect the amount of confidence we have in the answer.
MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1.
References must_be_base_p(), must_be_index_p(), and ok_for_base_p_1().
Referenced by decompose_normal_address().
rtx canonicalize_condition |
( |
rtx |
insn, |
|
|
rtx |
cond, |
|
|
int |
reverse, |
|
|
rtx * |
earliest, |
|
|
rtx |
want_reg, |
|
|
int |
allow_cc_mode, |
|
|
int |
valid_at_insn_p |
|
) |
| |
Given an insn INSN and condition COND, return the condition in a
canonical form to simplify testing by callers. Specifically:
(1) The code will always be a comparison operation (EQ, NE, GT, etc.).
(2) Both operands will be machine operands; (cc0) will have been replaced.
(3) If an operand is a constant, it will be the second operand.
(4) (LE x const) will be replaced with (LT x <const+1>) and similarly
for GE, GEU, and LEU.
If the condition cannot be understood, or is an inequality floating-point
comparison which needs to be reversed, 0 will be returned.
If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
If EARLIEST is nonzero, it is a pointer to a place where the earliest
insn used in locating the condition was found. If a replacement test
of the condition is desired, it should be placed in front of that
insn and we will be sure that the inputs are still valid.
If WANT_REG is nonzero, we wish the condition to be relative to that
register, if possible. Therefore, do not canonicalize the condition
further. If ALLOW_CC_MODE is nonzero, allow the condition returned
to be a compare to a CC mode register.
If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
and at INSN.
References cc0_rtx, const_val, gen_int_mode(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT, modified_between_p(), modified_in_p(), prev_nonnote_insn(), prev_nonnote_nondebug_insn(), reg_set_p(), reversed_comparison_code(), RTX_COMM_COMPARE, RTX_COMPARE, rtx_equal_p(), SET, set_of(), swap_condition(), and val_signbit_known_set_p().
Referenced by get_condition(), noce_get_alt_condition(), and noce_get_condition().
int commutative_operand_precedence |
( |
| ) |
|
Return nonzero if X's old contents don't survive after INSN.
This will be true if X is (cc0) or if X is a register and
X dies in INSN or because INSN entirely sets X.
"Entirely set" means set directly and not through a SUBREG, or
ZERO_EXTRACT, so no trace of the old contents remains.
Likewise, REG_INC does not count.
REG may be a hard or pseudo reg. Renumbering is not taken into account,
but for this use that makes no difference, since regs don't overlap
during their lifetimes. Therefore, this function may be used
at any time after deaths have been computed.
If REG is a hard reg that occupies multiple machine registers, this
function will only return 1 if each of those registers will be replaced
by INSN.
References dead_or_set_regno_p().
Return the last thing that X was assigned from before *PINSN. If VALID_TO
is not NULL_RTX then verify that the object is not modified up to VALID_TO.
If the object was modified, if we hit a partial assignment to X, or hit a
CODE_LABEL first, return X. If we found an assignment, update *PINSN to
point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
be the src.
References find_reg_note(), modified_between_p(), reg_set_p(), and rtx_equal_p().
Traverse *X looking for MEMs, and for autoinc operations within
them. For each such autoinc operation found, call FN, passing it
the innermost enclosing MEM, the operation itself, the RTX modified
by the operation, two RTXs (the second may be NULL) that, once
added, represent the value to be held by the modified RTX
afterwards, and ARG. FN is to return -1 to skip looking for other
autoinc operations within the visited operation, 0 to continue the
traversal, or any other value to have it returned to the caller of
for_each_inc_dec.
References for_each_inc_dec_ops::arg, for_each_inc_dec_ops::fn, for_each_inc_dec_find_mem(), for_each_rtx(), and for_each_inc_dec_ops::mem.
Referenced by check_for_inc_dec(), check_for_inc_dec_1(), and cselib_record_sets().
Traverse X via depth-first search, calling F for each
sub-expression (including X itself). F is also passed the DATA.
If F returns -1, do not traverse sub-expressions, but continue
traversing the rest of the tree. If F ever returns any other
nonzero value, stop the traversal, and return the value returned
by F. Otherwise, return 0. This function does not traverse inside
tree structure that contains RTX_EXPRs, or into sub-expressions
whose format code is `0' since it is not known whether or not those
codes are actually RTL.
This routine is very general, and could (should?) be used to
implement many of the other routines in this file.
References for_each_rtx_1(), and non_rtx_starting_operands.
Given a jump insn JUMP, return the condition that will cause it to branch
to its JUMP_LABEL. If the condition cannot be understood, or is an
inequality floating-point comparison which needs to be reversed, 0 will
be returned.
If EARLIEST is nonzero, it is a pointer to a place where the earliest
insn used in locating the condition was found. If a replacement test
of the condition is desired, it should be placed in front of that
insn and we will be sure that the inputs are still valid. If EARLIEST
is null, the returned condition will be valid at INSN.
If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
compare CC mode register.
VALID_AT_INSN_P is the same as for canonicalize_condition.
References any_condjump_p(), canonicalize_condition(), and pc_set().
static void init_num_sign_bit_copies_in_rep |
( |
| ) |
|
|
static |
Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
TARGET_MODE_REP_EXTENDED.
Note that we assume that the property of
TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
narrower than mode B. I.e., if A is a mode narrower than B then in
order to be able to operate on it in mode B, mode A needs to
satisfy the requirements set by the representation of mode B.
References num_sign_bit_copies_in_rep, and targetm.
Referenced by init_rtlanal().
bool label_is_jump_target_p |
( |
| ) |
|
Return true if LABEL is a target of JUMP_INSN. This applies only
to non-complex jumps. That is, direct unconditional, conditional,
and tablejumps, but not computed jumps or returns. It also does
not apply to the fallthru case of a conditional jump.
References find_reg_note(), and tablejump_p().
int may_trap_or_fault_p |
( |
| ) |
|
Same as above, but additionally return nonzero if evaluating rtx X might
cause a fault. We define a fault for the purpose of this function as an
erroneous execution condition that cannot be encountered during the normal
execution of a valid program; the typical example is an unaligned memory
access on a strict alignment machine. The compiler guarantees that it
doesn't generate code that will fault from a valid program, but this
guarantee doesn't mean anything for individual instructions. Consider
the following example:
struct S { int d; union { char *cp; int *ip; }; };
int foo(struct S *s)
{
if (s->d == 1)
return *s->ip;
else
return *s->cp;
}
on a strict alignment machine. In a valid program, foo will never be
invoked on a structure for which d is equal to 1 and the underlying
unique field of the union not aligned on a 4-byte boundary, but the
expression *s->ip might cause a fault if considered individually.
At the RTL level, potentially problematic expressions will almost always
verify may_trap_p; for example, the above dereference can be emitted as
(mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
However, suppose that foo is inlined in a caller that causes s->cp to
point to a local character variable and guarantees that s->d is not set
to 1; foo may have been effectively translated into pseudo-RTL as:
if ((reg:SI) == 1)
(set (reg:SI) (mem:SI (%fp - 7)))
else
(set (reg:QI) (mem:QI (%fp - 7)))
Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
memory reference to a stack slot, but it will certainly cause a fault
on a strict alignment machine.
References may_trap_p_1().
Given an expression, X, compute which bits in X can be nonzero.
We don't care about bits outside of those defined in MODE.
For most X this is simply GET_MODE_MASK (MODE), but if X is
an arithmetic operation, we can do better.
References cached_nonzero_bits(), count, floor_log2(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT_M1U, num_sign_bit_copies(), ptr_mode, rtl_hooks::reg_nonzero_bits, target_default_pointer_address_modes_p(), and val_signbit_known_set_p().
Referenced by cached_nonzero_bits().
Call FUN on each register or MEM that is stored into or clobbered by X.
(X would be the pattern of an insn). DATA is an arbitrary pointer,
ignored by note_stores, but passed to FUN.
FUN receives three arguments:
1. the REG, MEM, CC0 or PC being stored in or clobbered,
2. the SET or CLOBBER rtx that does the store,
3. the pointer DATA provided to note_stores.
If the item being stored in or clobbered is a SUBREG of a hard register,
the SUBREG will be passed.
References note_stores(), and SET.
Referenced by add_with_sets(), adjust_insn(), assign_parm_setup_reg(), build_def_use(), calculate_loop_reg_pressure(), can_move_insns_across(), combine_instructions(), compute_defs_uses_and_gen(), compute_hash_table_work(), copyprop_hardreg_forward_1(), cselib_record_sets(), delete_trivially_dead_insns(), emit_inc_dec_insn_before(), emit_libcall_block_1(), emit_output_reload_insns(), expand_atomic_compare_and_swap(), find_all_hard_reg_sets(), find_first_parameter_load(), init_alias_analysis(), init_insn_reg_pressure_info(), insert_one_insn(), likely_spilled_retval_p(), load_killed_in_block_p(), mark_nonreg_stores(), mark_target_live_regs(), mem_write_insn_p(), memory_modified_in_insn_p(), note_stores(), notice_stack_pointer_modification(), optimize_mode_switching(), record_dead_and_set_regs(), record_last_mem_set_info(), record_opr_changes(), reg_dead_at_p(), reload(), reload_as_needed(), reload_combine(), reload_cse_move2add(), replace_read(), save_call_clobbered_regs(), set_of(), sets_likely_spilled(), setup_save_areas(), simplify_using_initial_values(), thread_prologue_and_epilogue_insns(), try_combine(), update_equiv_regs(), and validate_equiv_mem().
void note_uses |
( |
rtx * |
pbody, |
|
|
void(*)(rtx *, void *) |
fun, |
|
|
void * |
data |
|
) |
| |
Like note_stores, but call FUN for each expression that is being
referenced in PBODY, a pointer to the PATTERN of an insn. We only call
FUN for each expression, not any interior subexpressions. FUN receives a
pointer to the expression and the DATA passed to this function.
Note that this is not quite the same test as that done in reg_referenced_p
since that considers something as being referenced if it is being
partially set, while we do not.
References note_uses(), and SET.
Referenced by add_dependence(), add_with_sets(), adjust_insn(), bypass_block(), combine_instructions(), copyprop_hardreg_forward_1(), cprop_insn(), insert_one_insn(), local_cprop_pass(), mem_read_insn_p(), note_uses(), scan_insn(), thread_prologue_and_epilogue_insns(), and validate_replace_src_group().
int refers_to_regno_p |
( |
unsigned int |
regno, |
|
|
unsigned int |
endregno, |
|
|
const_rtx |
x, |
|
|
rtx * |
loc |
|
) |
| |
Return nonzero if register in range [REGNO, ENDREGNO)
appears either explicitly or implicitly in X
other than being stored into.
References contained within the substructure at LOC do not count.
LOC may be zero, meaning don't ignore anything.
References refers_to_regno_p(), SET, subreg_nregs(), and subreg_regno().
Referenced by compute_defs_uses_and_gen(), delete_output_reload(), df_get_call_refs(), distribute_notes(), link_btr_uses(), refers_to_regno_p(), reg_overlap_mentioned_p(), remove_invalid_refs(), remove_invalid_subreg_refs(), and sched_analyze_insn().
int reg_overlap_mentioned_p |
( |
| ) |
|
Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
we check if any register number in X conflicts with the relevant register
numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
contains a MEM (we don't bother checking for memory addresses that can't
conflict because we expect this to be a rare case).
References refers_to_regno_p(), reg_mentioned_p(), reg_overlap_mentioned_p(), subreg_nregs(), and subreg_regno().
Return an estimate of the cost of computing rtx X.
One use is in cse, to decide which expression to keep in the hash table.
Another is in rtl generation, to pick the cheapest way to multiply.
Other uses like the latter are expected in the future.
X appears as operand OPNO in an expression with code OUTER_CODE.
SPEED specifies whether costs optimized for speed or size should
be returned.
References rtx_cost(), SET, and targetm.
@verbatim Analyze RTL for GNU compiler.
Copyright (C) 1987-2013 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see http://www.gnu.org/licenses/.
Forward declarations
Referenced by set_of().
void subreg_get_info |
( |
unsigned int |
xregno, |
|
|
enum machine_mode |
xmode, |
|
|
unsigned int |
offset, |
|
|
enum machine_mode |
ymode, |
|
|
struct subreg_info * |
info |
|
) |
| |
Fill in information about a subreg of a hard register.
xregno - A regno of an inner hard subreg_reg (or what will become one).
xmode - The mode of xregno.
offset - The byte offset.
ymode - The mode of a top level SUBREG (or what may become one).
info - Pointer to structure to fill in.
References HOST_WIDE_INT, mode_for_size(), subreg_info::nregs, offset, subreg_info::offset, subreg_info::representable_p, and subreg_lowpart_offset().
Referenced by rtx_renumbered_equal_p(), simplify_subreg_regno(), subreg_nregs_with_regno(), subreg_offset_representable_p(), subreg_regno_offset(), and true_regnum().
unsigned int subreg_regno_offset |
( |
unsigned int |
xregno, |
|
|
enum machine_mode |
xmode, |
|
|
unsigned int |
offset, |
|
|
enum machine_mode |
ymode |
|
) |
| |
This function returns the regno offset of a subreg expression.
xregno - A regno of an inner hard subreg_reg (or what will become one).
xmode - The mode of xregno.
offset - The byte offset.
ymode - The mode of a top level SUBREG (or what may become one).
RETURN - The regno offset which would be used.
References subreg_info::offset, and subreg_get_info().
Referenced by add_stored_regs(), choose_reload_regs(), constrain_operands(), df_ref_record(), find_dummy_reload(), find_reloads(), find_reloads_address_1(), get_hard_regno(), get_true_reg(), go_through_subreg(), maybe_mode_change(), move2add_valid_value_p(), operands_match_p(), push_reload(), reg_overlap_mentioned_for_reload_p(), reload_combine_note_store(), subreg_regno(), and var_lowpart().
unsigned int num_sign_bit_copies_in_rep[MAX_MODE_INT+1][MAX_MODE_INT+1] |
|
static |
Truncation narrows the mode from SOURCE mode to DESTINATION mode.
If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
SIGN_EXTEND then while narrowing we also have to enforce the
representation and sign-extend the value to mode DESTINATION_REP.
If the value is already sign-extended to DESTINATION_REP mode we
can just switch to DESTINATION mode on it. For each pair of
integral modes SOURCE and DESTINATION, when truncating from SOURCE
to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
contains the number of high-order bits in SOURCE that have to be
copies of the sign-bit so that we can do this mode-switch to
DESTINATION.
Referenced by init_num_sign_bit_copies_in_rep(), and truncated_to_mode().