GCC Middle and Back End API Reference
reorg.c File Reference

Functions

static rtx skip_consecutive_labels ()
static void link_cc0_insns ()
static int stop_search_p (rtx, int)
static int resource_conflicts_p (struct resources *, struct resources *)
static int insn_references_resource_p (rtx, struct resources *, bool)
static int insn_sets_resource_p (rtx, struct resources *, bool)
static rtx find_end_label (rtx)
static rtx emit_delay_sequence (rtx, rtx, int)
static rtx add_to_delay_list (rtx, rtx)
static rtx delete_from_delay_slot (rtx)
static void delete_scheduled_jump (rtx)
static void note_delay_statistics (int, int)
static rtx optimize_skip (rtx)
static int get_jump_flags (rtx, rtx)
static int mostly_true_jump (rtx)
static rtx get_branch_condition (rtx, rtx)
static int condition_dominates_p (rtx, rtx)
static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx)
static int redirect_with_delay_list_safe_p (rtx, rtx, rtx)
static int check_annul_list_true_false (int, rtx)
static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx, struct resources *, struct resources *, struct resources *, int, int *, int *, rtx *)
static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx, struct resources *, struct resources *, struct resources *, int, int *, int *)
static void try_merge_delay_insns (rtx, rtx)
static rtx redundant_insn (rtx, rtx, rtx)
static int own_thread_p (rtx, rtx, int)
static void update_block (rtx, rtx)
static int reorg_redirect_jump (rtx, rtx)
static void update_reg_dead_notes (rtx, rtx)
static void fix_reg_dead_note (rtx, rtx)
static void update_reg_unused_notes (rtx, rtx)
static void fill_simple_delay_slots (int)
static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx, int, int, int, int, int *, rtx)
static void fill_eager_delay_slots (void)
static void relax_delay_slots (rtx)
static void make_return_insns (rtx)
static rtx first_active_target_insn ()
static bool simplejump_or_return_p ()
static int stop_search_p ()
static int resource_conflicts_p ()
static rtx find_end_label ()
static rtx emit_delay_sequence ()
static rtx add_to_delay_list ()
static rtx delete_from_delay_slot ()
static void delete_scheduled_jump ()
static void note_delay_statistics ()
static rtx optimize_skip ()
static int get_jump_flags ()
static int mostly_true_jump ()
static rtx get_branch_condition ()
static int condition_dominates_p ()
static int redirect_with_delay_slots_safe_p ()
static int redirect_with_delay_list_safe_p ()
static int check_annul_list_true_false ()
static void try_merge_delay_insns ()
static rtx redundant_insn ()
static int own_thread_p ()
static void update_block ()
static int reorg_redirect_jump ()
static void update_reg_dead_notes ()
static void fix_reg_dead_note ()
static void update_reg_unused_notes ()
static rtx get_label_before ()
static void fill_simple_delay_slots ()
static rtx follow_jumps ()
static void delete_computation (rtx insn)
static void delete_prior_computation ()
static void delete_computation ()
static void delete_jump ()
static rtx label_before_next_insn ()
static void relax_delay_slots ()
static void make_return_insns ()
static void dbr_schedule ()
static bool gate_handle_delay_slots ()
static unsigned int rest_of_handle_delay_slots ()
rtl_opt_pass * make_pass_delay_slots ()
static bool gate_handle_machine_reorg ()
static unsigned int rest_of_handle_machine_reorg ()
rtl_opt_pass * make_pass_machine_reorg ()

Variables

static struct obstack unfilled_slots_obstack
static rtx * unfilled_firstobj
static rtx function_return_label
static rtx function_simple_return_label
static int * uid_to_ruid
static int max_uid
static int num_insns_needing_delays [NUM_REORG_FUNCTIONS][MAX_REORG_PASSES]
static int num_filled_delays [NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES]
static int reorg_pass_number
static vec< rtx > sibling_labels

Function Documentation

static rtx add_to_delay_list ( )
static
Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.   

References add_to_delay_list(), and clear_hashed_info_for_insn().

static int check_annul_list_true_false ( int, rtx )
static
static int check_annul_list_true_false ( )
static
DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.   
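
A minimal sketch of that check, assuming the per-insn annulling status is
recorded in the INSN_FROM_TARGET_P flag (an assumption based on the usual
reorg bookkeeping; this is not the verbatim implementation):

    /* Sketch: reject the list as soon as one slot insn disagrees with
       ANNUL_TRUE_P.  */
    static int
    check_annul_list_true_false (int annul_true_p, rtx delay_list)
    {
      rtx temp;

      for (temp = delay_list; temp; temp = XEXP (temp, 1))
        {
          rtx trial = XEXP (temp, 0);

          if ((annul_true_p && INSN_FROM_TARGET_P (trial))
              || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
            return 0;
        }
      return 1;
    }
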
static int condition_dominates_p ( rtx, rtx )
static
static int condition_dominates_p ( )
static
Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.   

References comparison_dominates_p(), const_true_rtx, get_branch_condition(), and rtx_equal_p().
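
Guided by the reference list above, the test can be sketched as comparing the
two conditions' operands and then asking comparison_dominates_p about their
comparison codes (a simplified sketch; the const_true_rtx corner cases of the
real routine are only partially reproduced):

    /* Sketch: CONDITION dominates when INSN's own branch condition is
       equal to it, or compares the same operands with a weaker code.  */
    static int
    condition_dominates_p (rtx condition, rtx insn)
    {
      rtx other = get_branch_condition (insn, JUMP_LABEL (insn));

      if (other == 0)
        return 0;
      if (rtx_equal_p (condition, other) || other == const_true_rtx)
        return 1;
      if (condition == const_true_rtx)
        return 0;

      if (!rtx_equal_p (XEXP (condition, 0), XEXP (other, 0))
          || !rtx_equal_p (XEXP (condition, 1), XEXP (other, 1)))
        return 0;

      return comparison_dominates_p (GET_CODE (condition), GET_CODE (other));
    }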

static void delete_computation ( rtx  insn)
static
static void delete_computation ( )
static
Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.

   Look at all our REG_DEAD notes.  If a previous insn does nothing other
   than set a register that dies in this insn, we can delete that insn
   as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.   

References add_reg_note(), cc0_rtx, delete_computation(), delete_prior_computation(), delete_related_insns(), prev_nonnote_insn(), reg_referenced_p(), sets_cc0_p(), and side_effects_p().
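
The core of the recursion can be pictured as follows (a simplified sketch of
the REG_DEAD walk described above; the CC0 handling and the note bookkeeping
of the real routine are omitted):

    /* Sketch: for each register that dies in INSN, try to delete the
       earlier insns that only existed to compute it, then delete INSN.  */
    static void
    delete_computation (rtx insn)
    {
      rtx note, next;

      for (note = REG_NOTES (insn); note; note = next)
        {
          next = XEXP (note, 1);

          if (REG_NOTE_KIND (note) == REG_DEAD
              && REG_P (XEXP (note, 0)))
            delete_prior_computation (note, insn);
        }

      delete_related_insns (insn);
    }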

static rtx delete_from_delay_slot ( rtx  )
static
static rtx delete_from_delay_slot ( )
static
Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.   

References add_insn_after(), add_to_delay_list(), delete_related_insns(), emit_barrier_after(), emit_delay_sequence(), and unfilled_slots_obstack.

static void delete_jump ( )
static
If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.   

References delete_computation().

Referenced by relax_delay_slots().

static void delete_prior_computation ( )
static
Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.   

References add_reg_note(), delete_computation(), find_regno_note(), prev_nonnote_insn(), reg_overlap_mentioned_p(), reg_set_p(), SET, and side_effects_p().

Referenced by delete_computation().

static void delete_scheduled_jump ( rtx  )
static

Referenced by relax_delay_slots().

static void delete_scheduled_jump ( )
static
Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.   

References cc0_rtx, delete_from_delay_slot(), delete_related_insns(), find_reg_note(), prev_nonnote_insn(), previous_insn(), reg_mentioned_p(), and sets_cc0_p().

static rtx emit_delay_sequence ( rtx, rtx, int )
static
static rtx emit_delay_sequence ( )
static
Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Returns the SEQUENCE that replaces INSN.   

References add_insn_after(), emit_insn(), end_sequence(), make_insn_raw(), remove_insn(), remove_note(), rtvec_alloc(), and start_sequence().

static void fill_eager_delay_slots ( )
static
Make another attempt to find insns to place in delay slots.

   We previously looked for insns located in front of the delay insn
   and, for non-jump delay insns, located behind the delay insn.

Here we only try to schedule jump insns and try to move insns from either
the target or the following insns into the delay slot.  If annulling is
supported, we are likely to succeed; otherwise, we can do this only
if it is safe.   

References condjump_in_parallel_p(), condjump_p(), const_true_rtx, emit_delay_sequence(), fill_slots_from_thread(), first_active_target_insn(), get_branch_condition(), mostly_true_jump(), next_active_insn(), note_delay_statistics(), and own_thread_p().

Referenced by dbr_schedule().

static void fill_simple_delay_slots ( int  )
static

Referenced by dbr_schedule(), and make_return_insns().

static void fill_simple_delay_slots ( )
static
Scan a function looking for insns that need a delay slot and find insns to
   put into the delay slot.

   NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
   as calls).  We do these first since we don't want jump insns (that are
   easier to fill) to get the only insns that could be used for non-jump insns.
   When it is zero, only try to fill JUMP_INSNs.

   When slots are filled in this manner, the insns (including the
   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
   it is possible to tell whether a delay slot has really been filled
   or not.  `final' knows how to deal with this, by communicating
   through FINAL_SEQUENCE.   

References add_to_delay_list(), can_throw_internal(), cc0_rtx, condjump_in_parallel_p(), condjump_p(), const_true_rtx, copy_delay_slot_insn(), delete_related_insns(), emit_delay_sequence(), fill_slots_from_thread(), find_end_label(), get_jump_flags(), get_label_before(), insn_references_resource_p(), insn_sets_resource_p(), jump_to_label_p(), link_cc0_insns(), mark_referenced_resources(), mark_set_resources(), MARK_SRC_DEST, MARK_SRC_DEST_CALL, may_trap_or_fault_p(), next_active_insn(), next_nonnote_insn(), next_real_insn(), no_labels_between_p(), note_delay_statistics(), optimize_skip(), own_thread_p(), prev_nonnote_insn(), reg_mentioned_p(), reorg_redirect_jump(), sets_cc0_p(), simple_return_rtx, simplejump_p(), stop_search_p(), try_split(), update_block(), and update_reg_dead_notes().

static rtx fill_slots_from_thread ( rtx  insn,
rtx  condition,
rtx  thread,
rtx  opposite_thread,
int  likely,
int  thread_if_true,
int  own_thread,
int  slots_to_fill,
int *  pslots_filled,
rtx  delay_list 
)
static
Try to find insns to place in delay slots.

   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
   or is an unconditional branch if CONDITION is const_true_rtx.
   *PSLOTS_FILLED is updated with the number of slots that we have filled.

   THREAD is a flow-of-control, either the insns to be executed if the
   branch is true or if the branch is false, THREAD_IF_TRUE says which.

   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
   to see if any potential delay slot insns set things needed there.

   LIKELY is nonzero if it is extremely likely that the branch will be
   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
   end of a loop back up to the top.

   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
   thread.  I.e., it is the fallthrough code of our jump or the target of the
   jump when we are the only jump going there.

   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
   case, we can only take insns from the head of the thread for our delay
   slot.  We then adjust the jump to point after the insns we have taken.   

References add_to_delay_list(), asm_noperands(), can_throw_internal(), cc0_rtx, check_annul_list_true_false(), const_true_rtx, constrain_operands(), copy_delay_slot_insn(), delete_related_insns(), emit_insn_after(), extract_insn(), find_end_label(), fix_reg_dead_note(), follow_jumps(), get_insns(), get_jump_flags(), get_label_before(), insn_references_resource_p(), insn_sets_resource_p(), jump_to_label_p(), link_cc0_insns(), mark_referenced_resources(), mark_set_resources(), MARK_SRC_DEST_CALL, mark_target_live_regs(), may_trap_or_fault_p(), modified_in_p(), negate_rtx(), next_active_insn(), next_nonnote_insn(), own_thread_p(), recog_memoized(), redirect_with_delay_list_safe_p(), redundant_insn(), reg_mentioned_p(), reg_overlap_mentioned_p(), reg_referenced_p(), reg_set_p(), reorg_redirect_jump(), rtx_equal_p(), SET, set_unique_reg_note(), sets_cc0_p(), side_effects_p(), simplejump_or_return_p(), steal_delay_list_from_fallthrough(), steal_delay_list_from_target(), stop_search_p(), try_split(), update_block(), update_reg_unused_notes(), and validate_replace_rtx().

Referenced by fill_eager_delay_slots(), and fill_simple_delay_slots().

static rtx find_end_label ( rtx  )
static
static rtx find_end_label ( )
static
Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   function_return_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   function_return_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once function_return_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   difficult.
   KIND is either simple_return_rtx or ret_rtx, indicating which type of
   return we're looking for.   

References emit_barrier(), emit_jump_insn(), emit_label(), emit_label_after(), function_return_label, function_simple_return_label, gen_label_rtx(), get_last_insn(), HAVE_epilogue, ret_rtx, set_return_jump_label(), simple_return_rtx, and unfilled_slots_obstack.

static rtx first_active_target_insn ( )
static
A wrapper around next_active_insn which takes care to return ret_rtx
   unchanged.   

References next_active_insn().

Referenced by fill_eager_delay_slots(), and steal_delay_list_from_target().
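
A sketch matching that description, assuming ANY_RETURN_P is the test for
ret_rtx/simple_return_rtx:

    /* Sketch: pass return rtxen through untouched, otherwise step to the
       next active insn.  */
    static rtx
    first_active_target_insn (rtx insn)
    {
      if (ANY_RETURN_P (insn))
        return insn;
      return next_active_insn (insn);
    }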

static void fix_reg_dead_note ( rtx, rtx )
static

Referenced by fill_slots_from_thread().

static void fix_reg_dead_note ( )
static
Called when an insn redundant with start_insn is deleted.  If there
   is a REG_DEAD note for the target of start_insn between start_insn
   and stop_insn, then the REG_DEAD note needs to be deleted since the
   value no longer dies there.

   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
   confused into thinking the register is dead.   

References next_nonnote_insn(), reg_set_p(), and remove_note().

static rtx follow_jumps ( )
static
Follow any unconditional jump at LABEL, for the purpose of redirecting JUMP;
   return the ultimate label reached by any such chain of jumps.
   Return a suitable return rtx if the chain ultimately leads to a
   return instruction.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.
   If the returned label is obtained by following a REG_CROSSING_JUMP
   jump, set *CROSSING to true, otherwise set it to false.   

References any_uncondjump_p(), find_reg_note(), next_active_insn(), onlyjump_p(), and targetm.

Referenced by fill_slots_from_thread(), and relax_delay_slots().
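
In outline, the chain is followed under a small depth bound so that loops
cannot stall the pass (a hedged sketch; the real routine performs additional
validity checks involving JUMP and the target hooks, which are omitted here):

    /* Sketch: hop from LABEL to the target of each trailing unconditional
       jump, giving up after a fixed number of hops or at a return.  */
    static rtx
    follow_jumps (rtx label, rtx jump ATTRIBUTE_UNUSED, bool *crossing)
    {
      rtx value = label;
      int depth;

      *crossing = false;
      for (depth = 0; depth < 10; depth++)
        {
          rtx this_insn = next_active_insn (value);

          if (this_insn == 0
              || !any_uncondjump_p (this_insn)
              || !onlyjump_p (this_insn))
            break;

          if (find_reg_note (this_insn, REG_CROSSING_JUMP, NULL_RTX))
            *crossing = true;
          value = JUMP_LABEL (this_insn);
          if (ANY_RETURN_P (value))
            break;                /* Chain ends in a return.  */
        }
      return value;
    }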

static bool gate_handle_delay_slots ( )
static
static bool gate_handle_machine_reorg ( )
static
Machine dependent reorg pass.   

References targetm.

static rtx get_branch_condition ( rtx, rtx )
static
static rtx get_branch_condition ( )
static
Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.   

References condjump_in_parallel_p(), const_true_rtx, pc_rtx, reversed_comparison_code(), and SET.

static int get_jump_flags ( )
static
Encode and return branch direction and prediction information for
    INSN assuming it will jump to LABEL.

    Non conditional branches return no direction information and
    are predicted as very likely taken.   

References condjump_in_parallel_p(), condjump_p(), max_uid, and uid_to_ruid.

static rtx get_label_before ( )
static
Return the label before INSN, or put a new label there.  If SIBLING is
   non-zero, it is another label associated with the new label (if any),
   typically the former target of the jump that will be redirected to
   the new label.   

References emit_label_after(), gen_label_rtx(), and prev_nonnote_insn().

Referenced by fill_simple_delay_slots(), fill_slots_from_thread(), make_return_insns(), and relax_delay_slots().

static int insn_references_resource_p ( rtx  insn,
struct resources *  res,
bool  include_delayed_effects 
)
static
Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, also return TRUE
   if a routine called by INSN uses those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.   

References mark_referenced_resources(), and resource_conflicts_p().

Referenced by fill_simple_delay_slots(), fill_slots_from_thread(), steal_delay_list_from_fallthrough(), steal_delay_list_from_target(), and try_merge_delay_insns().
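
Following that description, the routine boils down to two steps: collect
everything INSN references into a scratch resources set, then test that set
against RES (a sketch; CLEAR_RESOURCE is assumed to be the usual zeroing
helper from resource.h):

    /* Sketch: gather INSN's references and check them for overlap with
       RES via resource_conflicts_p.  */
    static int
    insn_references_resource_p (rtx insn, struct resources *res,
                                bool include_delayed_effects)
    {
      struct resources insn_res;

      CLEAR_RESOURCE (&insn_res);
      mark_referenced_resources (insn, &insn_res, include_delayed_effects);
      return resource_conflicts_p (&insn_res, res);
    }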

static int insn_sets_resource_p ( rtx  insn,
struct resources *  res,
bool  include_delayed_effects 
)
static
Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.   CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.   

References mark_set_resources(), MARK_SRC_DEST, MARK_SRC_DEST_CALL, and resource_conflicts_p().

Referenced by fill_simple_delay_slots(), fill_slots_from_thread(), redundant_insn(), steal_delay_list_from_fallthrough(), steal_delay_list_from_target(), and try_merge_delay_insns().

static rtx label_before_next_insn ( )
static

References next_active_insn().

Referenced by relax_delay_slots().

static void link_cc0_insns ( )
static
INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.   

References add_reg_note(), and next_nonnote_insn().

Referenced by fill_simple_delay_slots(), and fill_slots_from_thread().

rtl_opt_pass* make_pass_delay_slots ( )
rtl_opt_pass* make_pass_machine_reorg ( )
static void make_return_insns ( rtx  )
static

Referenced by dbr_schedule().

static void make_return_insns ( )
static
static int mostly_true_jump ( rtx  )
static
static int mostly_true_jump ( )
static
Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.   

References find_reg_note(), and prob.
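
Given that the reference list mentions find_reg_note and a probability value,
the classification presumably keys off the REG_BR_PROB note; the accessor and
the thresholds below are illustrative assumptions, not the exact source:

    /* Sketch: translate the branch-probability note into the coarse
       -1 / 0 / 1 / 2 scale described above.  */
    static int
    mostly_true_jump (rtx jump_insn)
    {
      rtx note = find_reg_note (jump_insn, REG_BR_PROB, NULL_RTX);

      if (note)
        {
          /* Assumed: the note stores an integer out of REG_BR_PROB_BASE
             (older GCCs kept a CONST_INT, read with INTVAL instead).  */
          int prob = XINT (note, 0);

          if (prob >= REG_BR_PROB_BASE * 9 / 10)
            return 2;                   /* Extremely likely taken.  */
          else if (prob >= REG_BR_PROB_BASE / 2)
            return 1;                   /* Slightly more likely taken.  */
          else if (prob >= REG_BR_PROB_BASE / 10)
            return 0;                   /* Slightly less likely taken.  */
          else
            return -1;                  /* Highly unlikely to be taken.  */
        }

      /* Without a note, make a neutral guess.  */
      return 0;
    }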

static void note_delay_statistics ( int, int )
static
static void note_delay_statistics ( )
static
static rtx optimize_skip ( rtx  )
static

Referenced by fill_simple_delay_slots().

static rtx optimize_skip ( )
static
Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

        (orig)           (skip)         (otherwise)
        Bcc.n L1        Bcc',a L1       Bcc,a L1'
        insn            insn            insn2
      L1:             L1:             L1:
        insn2           insn2           insn2
        insn3           insn3         L1':
                                        insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization. This saves executing the
       second branch in the case where the inverted condition is true.

        Bcc.n L1        Bcc',a L2
        insn            insn
      L1:             L1:
        Bra L2          Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.   

References add_to_delay_list(), can_throw_internal(), delete_related_insns(), find_end_label(), get_jump_flags(), invert_jump(), next_active_insn(), next_nonnote_insn(), recog_memoized(), reorg_redirect_jump(), simplejump_or_return_p(), and update_block().

static int own_thread_p ( rtx, rtx, int )
static
static int own_thread_p ( )
static
Return 1 if THREAD can only be executed in one way.  If LABEL is nonzero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is nonzero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.   

References next_active_insn(), and prev_nonnote_insn().

static int redirect_with_delay_list_safe_p ( rtx, rtx, rtx )
static

Referenced by fill_slots_from_thread().

static int redirect_with_delay_list_safe_p ( )
static
Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.   

References get_jump_flags().

static int redirect_with_delay_slots_safe_p ( rtx, rtx, rtx )
static
static int redirect_with_delay_slots_safe_p ( )
static
Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.   

References get_jump_flags().

static rtx redundant_insn ( )
static
See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.   

References candidate(), cc0_rtx, find_reg_note(), insn_sets_resource_p(), mark_referenced_resources(), mark_set_resources(), MARK_SRC_DEST_CALL, resources::memory, reg_mentioned_p(), resources::regs, resource_conflicts_p(), and rtx_equal_p().

static void relax_delay_slots ( rtx  )
static

Referenced by dbr_schedule().

static int reorg_redirect_jump ( rtx, rtx )
static
static int reorg_redirect_jump ( )
static
Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.   

References incr_ticks_for_insn(), and redirect_jump().
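
Given the description and the reference list, this is essentially a two-line
wrapper (a sketch; the final delete_unused argument passed to redirect_jump is
an assumption):

    /* Sketch: mark the jump's block as needing its resource info
       recomputed, then redirect as usual.  */
    static int
    reorg_redirect_jump (rtx jump, rtx nlabel)
    {
      incr_ticks_for_insn (jump);
      return redirect_jump (jump, nlabel, 1);
    }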

static int resource_conflicts_p ( struct resources *, struct resources * )
static
static int resource_conflicts_p ( )
static
Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.   

References resources::cc, hard_reg_set_intersect_p(), resources::memory, resources::regs, and resources::volatil.
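
A sketch of that test built from the resources fields named in the reference
list (the precise memory/volatile interaction in the real routine may differ):

    /* Sketch: the sets conflict if both touch CC, both touch memory,
       either one is volatile, or their hard register sets intersect.  */
    static int
    resource_conflicts_p (struct resources *res1, struct resources *res2)
    {
      if ((res1->cc && res2->cc)
          || (res1->memory && res2->memory)
          || res1->volatil
          || res2->volatil)
        return 1;

      return hard_reg_set_intersect_p (res1->regs, res2->regs);
    }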

static unsigned int rest_of_handle_delay_slots ( )
static
Run delay slot optimization.   

References dbr_schedule(), and get_insns().

static unsigned int rest_of_handle_machine_reorg ( )
static

References targetm.

static bool simplejump_or_return_p ( )
static
Return true iff INSN is a simplejump, or any kind of return insn.   

References simplejump_p().

Referenced by fill_slots_from_thread(), optimize_skip(), relax_delay_slots(), and steal_delay_list_from_fallthrough().
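
A sketch matching the description (ANY_RETURN_P on the pattern is the assumed
test for return insns):

    /* Sketch: a plain unconditional jump, or any flavour of return.  */
    static bool
    simplejump_or_return_p (rtx insn)
    {
      return (JUMP_P (insn)
              && (simplejump_p (insn) || ANY_RETURN_P (PATTERN (insn))));
    }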

static rtx skip_consecutive_labels ( )
static
First, some functions that were used before GCC got a control flow graph.
   These functions are now only used here in reorg.c, and have therefore
   been moved here to avoid inadvertent misuse elsewhere in the compiler.   
Return the last label to mark the same position as LABEL.  Return LABEL
   itself if it is null or any return rtx.   

References insn_chain::insn.

Referenced by dbr_schedule(), and relax_delay_slots().
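
In sketch form (hedged; null and return rtxen are passed through unchanged, as
the comment above requires):

    /* Sketch: walk forward over a run of consecutive labels and return
       the last one, so jumps to any of them can share a single target.  */
    static rtx
    skip_consecutive_labels (rtx label)
    {
      rtx insn;

      if (label == 0 || ANY_RETURN_P (label))
        return label;

      for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
        if (LABEL_P (insn))
          label = insn;

      return label;
    }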

static rtx steal_delay_list_from_fallthrough ( rtx  insn,
rtx  condition,
rtx  seq,
rtx  delay_list,
struct resources *  sets,
struct resources *  needed,
struct resources *  other_needed,
int  slots_to_fill,
int *  pslots_filled,
int *  pannul_p 
)
static
Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.   

References add_to_delay_list(), check_annul_list_true_false(), const_true_rtx, delete_from_delay_slot(), get_jump_flags(), insn_references_resource_p(), insn_sets_resource_p(), may_trap_or_fault_p(), redundant_insn(), sets_cc0_p(), and simplejump_or_return_p().

Referenced by fill_slots_from_thread().

static rtx steal_delay_list_from_target ( rtx  insn,
rtx  condition,
rtx  seq,
rtx  delay_list,
struct resources *  sets,
struct resources *  needed,
struct resources *  other_needed,
int  slots_to_fill,
int *  pslots_filled,
int *  pannul_p,
rtx *  pnew_thread 
)
static
INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.   

References add_to_delay_list(), check_annul_list_true_false(), condition_dominates_p(), const_true_rtx, copy_delay_slot_insn(), find_reg_note(), first_active_target_insn(), get_jump_flags(), insn_references_resource_p(), insn_sets_resource_p(), mark_set_resources(), MARK_SRC_DEST_CALL, may_trap_or_fault_p(), and redundant_insn().

Referenced by fill_slots_from_thread().

static int stop_search_p ( rtx, int )
static
static int stop_search_p ( )
static
Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.   

References asm_noperands(), and can_throw_internal().
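
In outline (a sketch; the real predicate also stops at insns that can throw
internally, per the reference to can_throw_internal above):

    /* Sketch: labels stop the search only when LABELS_P is set, jumps and
       barriers always stop it, and inline asm is too opaque to move.  */
    static int
    stop_search_p (rtx insn, int labels_p)
    {
      if (insn == 0)
        return 1;

      switch (GET_CODE (insn))
        {
        case NOTE:
        case CALL_INSN:
          return 0;

        case CODE_LABEL:
          return labels_p;

        case JUMP_INSN:
        case BARRIER:
          return 1;

        case INSN:
          /* Leave asm statements where they are.  */
          return asm_noperands (PATTERN (insn)) >= 0;

        default:
          return 1;
        }
    }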

static void try_merge_delay_insns ( rtx, rtx )
static

Referenced by relax_delay_slots().

static void try_merge_delay_insns ( )
static
Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.   

References delete_from_delay_slot(), delete_related_insns(), get_jump_flags(), insn_references_resource_p(), insn_sets_resource_p(), mark_referenced_resources(), mark_set_resources(), MARK_SRC_DEST_CALL, next_active_insn(), next_nonnote_insn(), rtx_equal_p(), sets_cc0_p(), stop_search_p(), try_split(), and update_block().

static void update_block ( rtx, rtx )
static
static void update_block ( )
static
Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.   

References emit_insn_before(), and incr_ticks_for_insn().
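
Based on that description, the marker is simply a USE wrapped around the moved
insn (a sketch; the real routine also skips insns taken from the branch
target):

    /* Sketch: leave a (use (INSN)) marker before WHERE so that
       mark_target_live_regs still sees the moved insn, and flag the
       block's resource info as stale.  */
    static void
    update_block (rtx insn, rtx where)
    {
      emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
      incr_ticks_for_insn (insn);
    }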

static void update_reg_dead_notes ( rtx, rtx )
static

Referenced by fill_simple_delay_slots().

static void update_reg_dead_notes ( )
static
Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.   

References next_nonnote_insn(), reg_referenced_p(), and remove_note().

static void update_reg_unused_notes ( rtx, rtx )
static

Referenced by fill_slots_from_thread().

static void update_reg_unused_notes ( )
static
Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.

This handles the case of udivmodXi4 instructions, which optimize their
   output depending on whether any REG_UNUSED notes are present.  We must
   make sure that INSN calculates as many results as REDUNDANT_INSN does.   

References find_regno_note(), and remove_note().
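
A sketch following that description, using the generic note accessors:

    /* Sketch: drop every REG_UNUSED note from INSN whose register has no
       matching note on REDUNDANT_INSN.  */
    static void
    update_reg_unused_notes (rtx insn, rtx redundant_insn)
    {
      rtx link, next;

      for (link = REG_NOTES (insn); link; link = next)
        {
          next = XEXP (link, 1);

          if (REG_NOTE_KIND (link) != REG_UNUSED
              || !REG_P (XEXP (link, 0)))
            continue;

          if (!find_regno_note (redundant_insn, REG_UNUSED,
                                REGNO (XEXP (link, 0))))
            remove_note (insn, link);
        }
    }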


Variable Documentation

rtx function_return_label
static
Points to the label before the end of the function, or before a
   return insn.   

Referenced by dbr_schedule(), find_end_label(), and make_return_insns().

rtx function_simple_return_label
static
Likewise for a simple_return.   

Referenced by dbr_schedule(), find_end_label(), and make_return_insns().

int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES]
static
int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES]
static
int reorg_pass_number
static
vec<rtx> sibling_labels
static
int* uid_to_ruid
static
Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.   

Referenced by dbr_schedule(), and get_jump_flags().

rtx* unfilled_firstobj
static

Referenced by dbr_schedule(), and make_return_insns().

struct obstack unfilled_slots_obstack
static
Insns which have delay slots that have not yet been filled.   

Referenced by dbr_schedule(), delete_from_delay_slot(), find_end_label(), and make_return_insns().