GCC Middle and Back End API Reference
tree-ssa-reassoc.c File Reference

Data Structures

struct  operand_entry
struct  oecount_s
struct  oecount_hasher
struct  range_entry
struct  repeat_factor_d

Typedefs

typedef struct operand_entry * operand_entry_t
typedef struct oecount_s oecount
typedef struct repeat_factor_d repeat_factor
typedef struct repeat_factor_d * repeat_factor_t
typedef const struct repeat_factor_d * const_repeat_factor_t

Functions

static long get_rank (tree)
static long phi_rank ()
static bool loop_carried_phi ()
static long propagate_rank ()
static long find_operand_rank ()
static void insert_operand_rank ()
static long get_rank ()
static int constant_type ()
static int sort_by_operand_rank ()
static void add_to_ops_vec ()
static void add_repeat_to_ops_vec (vec< operand_entry_t > *ops, tree op, HOST_WIDE_INT repeat)
static bool is_reassociable_op ()
static tree get_unary_op ()
static bool eliminate_duplicate_pair (enum tree_code opcode, vec< operand_entry_t > *ops, bool *all_done, unsigned int i, operand_entry_t curr, operand_entry_t last)
static bool eliminate_plus_minus_pair (enum tree_code opcode, vec< operand_entry_t > *ops, unsigned int currindex, operand_entry_t curr)
static bool eliminate_not_pairs (enum tree_code opcode, vec< operand_entry_t > *ops, unsigned int currindex, operand_entry_t curr)
static void eliminate_using_constants (enum tree_code opcode, vec< operand_entry_t > *ops)
static void linearize_expr_tree (vec< operand_entry_t > *, gimple, bool, bool)
static int oecount_cmp ()
static bool stmt_is_power_of_op ()
static HOST_WIDE_INT decrement_power ()
static void propagate_op_to_single_use ()
static void zero_one_operation ()
static gimple build_and_add_sum ()
static bool undistribute_ops_list (enum tree_code opcode, vec< operand_entry_t > *ops, struct loop *loop)
static bool eliminate_redundant_comparison (enum tree_code opcode, vec< operand_entry_t > *ops, unsigned int currindex, operand_entry_t curr)
static void optimize_ops_list (enum tree_code opcode, vec< operand_entry_t > *ops)
static void init_range_entry ()
static int range_entry_cmp ()
static bool update_range_test (struct range_entry *range, struct range_entry *otherrange, unsigned int count, enum tree_code opcode, vec< operand_entry_t > *ops, tree exp, bool in_p, tree low, tree high, bool strict_overflow_p)
static void optimize_range_tests (enum tree_code opcode, vec< operand_entry_t > *ops)
static bool final_range_test_p ()
static bool suitable_cond_bb (basic_block bb, basic_block test_bb, basic_block *other_bb, bool backward)
static bool no_side_effect_bb ()
static bool get_ops (tree var, enum tree_code code, vec< operand_entry_t > *ops, struct loop *loop)
static void maybe_optimize_range_tests ()
static bool is_phi_for_stmt ()
static void remove_visited_stmt_chain ()
static void swap_ops_for_binary_stmt (vec< operand_entry_t > ops, unsigned int opindex, gimple stmt)
static bool not_dominated_by ()
static gimple appears_later_in_bb ()
static gimple find_insert_point ()
static void insert_stmt_after ()
static gimple get_def_stmt ()
static void ensure_ops_are_available ()
static void rewrite_expr_tree (gimple stmt, unsigned int opindex, vec< operand_entry_t > ops, bool moved)
static int get_required_cycles ()
static int get_reassociation_width (int ops_num, enum tree_code opc, enum machine_mode mode)
static void rewrite_expr_tree_parallel (gimple stmt, int width, vec< operand_entry_t > ops)
static void linearize_expr ()
static gimple get_single_immediate_use ()
static tree negate_value ()
static bool should_break_up_subtract ()
static void break_up_subtract ()
static bool acceptable_pow_call ()
static void repropagate_negates ()
static bool can_reassociate_p ()
static void break_up_subtract_bb ()
static int compare_repeat_factors ()
static tree attempt_builtin_powi ()
static void transform_stmt_to_copy ()
static void transform_stmt_to_multiply (gimple_stmt_iterator *gsi, gimple stmt, tree rhs1, tree rhs2)
static void reassociate_bb ()
void dump_ops_vector (FILE *file, vec< operand_entry_t > ops)
void debug_ops_vector (vec< operand_entry_t > ops)
void dump_ops_vector ()
DEBUG_FUNCTION void debug_ops_vector ()
static void do_reassoc ()
static void init_reassoc ()
static void fini_reassoc ()
static unsigned int execute_reassoc ()
static bool gate_tree_ssa_reassoc ()
gimple_opt_pass * make_pass_reassoc ()

Variables

static struct { ... }  reassociate_stats
static alloc_pool operand_entry_pool
static int next_operand_entry_id
static long * bb_rank
static struct pointer_map_t * operand_rank
static vec< tree > plus_negates
static vec< oecount > cvec
static vec< repeat_factor > repeat_factor_vec

Typedef Documentation

typedef struct oecount_s oecount
Structure for tracking and counting operands.   
typedef struct operand_entry * operand_entry_t
Operator, rank pair.   
typedef struct repeat_factor_d * repeat_factor_t

Function Documentation

static bool acceptable_pow_call ( )
static
Determine whether STMT is a builtin call that raises an SSA name
   to an integer power and has only one use.  If so, and this is early
   reassociation and unsafe math optimizations are permitted, place
   the SSA name in *BASE and the exponent in *EXPONENT, and return TRUE.
   If any of these conditions does not hold, return FALSE.   
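For illustration, a minimal sketch of the kind of GIMPLE this matches
   (hypothetical SSA names; matched only when unsafe math optimizations
   are permitted and the LHS has a single use):

   x_1 = __builtin_pow (y_2, 4.0);    /* *BASE = y_2, *EXPONENT = 4  */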

References BUILT_IN_NORMAL, first_pass_instance, gimple_call_arg(), gimple_call_fndecl(), gimple_call_lhs(), has_single_use(), HOST_BITS_PER_WIDE_INT, host_integerp(), is_gimple_call(), real_from_integer(), real_identical(), and real_to_integer().

Referenced by linearize_expr_tree().

static void add_repeat_to_ops_vec ( vec< operand_entry_t > *  ops,
tree  op,
HOST_WIDE_INT  repeat 
)
static
Add an operand entry to *OPS for the tree operand OP with repeat
   count REPEAT.   

References operand_entry::count, get_rank(), operand_entry::id, next_operand_entry_id, operand_entry::op, pool_alloc(), operand_entry::rank, and reassociate_stats.

Referenced by linearize_expr_tree().

static gimple appears_later_in_bb ( )
static
Among STMT1 and STMT2, return the statement that appears later.  Both
   statements are in the same BB and have the same UID.

References gimple_uid(), gsi_end_p(), gsi_for_stmt(), gsi_next(), and gsi_stmt().

Referenced by find_insert_point().

static tree attempt_builtin_powi ( )
static
static void break_up_subtract ( )
static
static void break_up_subtract_bb ( )
static
Break up subtract operations in block BB.

   We do this top down because we don't know whether the subtract is
   part of a possible chain of reassociation except at the top.

   IE given
   d = f + g
   c = a + e
   b = c - d
   q = b - r
   k = t - q

   we want to break up k = t - q, but we won't until we've transformed q
   = b - r, which won't be broken up until we transform b = c - d.

   En passant, clear the GIMPLE visited flag on every statement.   
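As an illustrative sketch (not literal pass output), after the block
   has been processed the chain above would read:

   b = c + (-d)
   q = b + (-r)
   k = t + (-q)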

References break_up_subtract(), can_reassociate_p(), CDI_DOMINATORS, first_dom_son(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_set_visited(), gsi_end_p(), gsi_next(), gsi_start_bb(), gsi_stmt(), is_gimple_assign(), next_dom_son(), and should_break_up_subtract().

Referenced by do_reassoc().

static gimple build_and_add_sum ( )
static
static bool can_reassociate_p ( )
static
Returns true if OP is of a type for which we can do reassociation.
   That is for integral or non-saturating fixed-point types, and for
   floating-point types when associative-math is enabled.

Referenced by break_up_subtract_bb(), and reassociate_bb().

static int compare_repeat_factors ( )
static
Used for sorting the repeat factor vector.  Sort primarily by
   ascending occurrence count, secondarily by descending rank.   
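As a standalone sketch of this ordering (field names borrowed from
   repeat_factor_d::count and repeat_factor_d::rank; the real comparator
   takes const_repeat_factor_t entries):

   struct rf { unsigned int count; long rank; };

   static int
   compare_rf (const void *p1, const void *p2)
   {
     const struct rf *rf1 = (const struct rf *) p1;
     const struct rf *rf2 = (const struct rf *) p2;

     if (rf1->count != rf2->count)
       return rf1->count < rf2->count ? -1 : 1;   /* ascending count  */
     if (rf1->rank != rf2->rank)
       return rf1->rank > rf2->rank ? -1 : 1;     /* descending rank  */
     return 0;
   }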

References repeat_factor_d::count, and repeat_factor_d::rank.

Referenced by attempt_builtin_powi().

static int constant_type ( )
inline static
Classify an invariant tree into integer, float, or other, so that
   we can sort them to be near other constants of the same type.   

Referenced by sort_by_operand_rank().

void debug_ops_vector ( vec< operand_entry_t > ops)
DEBUG_FUNCTION void debug_ops_vector ( )
Dump the operand entry vector OPS to STDERR.   

References dump_ops_vector().

static HOST_WIDE_INT decrement_power ( )
static
Given STMT which is a __builtin_pow* call, decrement its exponent
   in place and return the result.  Assumes that stmt_is_power_of_op
   was previously called for STMT and returned TRUE.   
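For example (illustrative GIMPLE, hypothetical SSA names):

   t_1 = __builtin_powi (x_2, 5);     /* before  */
   t_1 = __builtin_powi (x_2, 4);     /* after; the new exponent, 4,
                                         is returned  */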

References build_int_cst(), build_real(), gimple_call_arg(), gimple_call_fndecl(), gimple_call_set_arg(), HOST_WIDE_INT, real_from_integer(), and real_to_integer().

Referenced by zero_one_operation().

static void do_reassoc ( )
static
void dump_ops_vector ( FILE *  file,
vec< operand_entry_t > ops 
)

Referenced by debug_ops_vector().

void dump_ops_vector ( )
Dump the operand entry vector OPS to FILE.   

References operand_entry::op, print_generic_expr(), and operand_entry::rank.

static bool eliminate_duplicate_pair ( enum tree_code  opcode,
vec< operand_entry_t > *  ops,
bool *  all_done,
unsigned int  i,
operand_entry_t  curr,
operand_entry_t  last 
)
static
If CURR and LAST are a pair of ops that OPCODE allows us to
   eliminate through equivalences, do so, remove them from OPS, and
   return true.  Otherwise, return false.   

References add_to_ops_vec(), build_zero_cst(), dump_file, dump_flags, operand_entry::op, print_generic_expr(), print_generic_stmt(), and reassociate_stats.

Referenced by optimize_ops_list().

static bool eliminate_not_pairs ( enum tree_code  opcode,
vec< operand_entry_t > *  ops,
unsigned int  currindex,
operand_entry_t  curr 
)
static
If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR and CURR->OP is really a
   bitwise not expression, look in OPS for a corresponding operand to
   cancel it out.  If we find one, remove the other from OPS, replace
   OPS[CURRINDEX] with 0, and return true.  Otherwise, return
   false.

References build_low_bits_mask(), build_zero_cst(), dump_file, dump_flags, get_unary_op(), operand_entry::op, print_generic_expr(), operand_entry::rank, and reassociate_stats.

Referenced by optimize_ops_list().

static bool eliminate_plus_minus_pair ( enum tree_code  opcode,
vec< operand_entry_t > *  ops,
unsigned int  currindex,
operand_entry_t  curr 
)
static
If OPCODE is PLUS_EXPR and CURR->OP is a negate expression or a bitwise
   not expression, look in OPS for a corresponding positive operation to
   cancel it out.  If we find one, remove the other from OPS, replace
   OPS[CURRINDEX] with 0 or -1, respectively, and return true.  Otherwise,
   return false.

References add_to_ops_vec(), build_int_cst_type(), build_zero_cst(), dump_file, dump_flags, get_unary_op(), operand_entry::op, print_generic_expr(), operand_entry::rank, and reassociate_stats.

Referenced by optimize_ops_list().

static bool eliminate_redundant_comparison ( enum tree_code  opcode,
vec< operand_entry_t > *  ops,
unsigned int  currindex,
operand_entry_t  curr 
)
static
If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR and CURR is a comparison
   expression, examine the other OPS to see if any of them are comparisons
   of the same values, which we may be able to combine or eliminate.
   For example, we can rewrite (a < b) | (a == b) as (a <= b).   

References add_to_ops_vec(), build_and_add_sum(), dump_file, dump_flags, extract_ops_from_tree(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_get_lhs(), is_gimple_assign(), is_gimple_val(), maybe_fold_and_comparisons(), maybe_fold_or_comparisons(), operand_entry::op, op_symbol_code(), operand_equal_p(), print_generic_expr(), reassociate_stats, tcc_comparison, and useless_type_conversion_p().

Referenced by optimize_ops_list().

static void eliminate_using_constants ( enum tree_code  opcode,
vec< operand_entry_t > *  ops 
)
static
Use constant value that may be present in OPS to try to eliminate
   operands.  Note that this function is only really used when we've
   eliminated ops for other reasons, or merged constants.  Across
   single statements, fold already does all of this, plus more.  There
   is little point in duplicating logic, so I've only included the
   identities that I could ever construct testcases to trigger.   
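The identities involved are of this flavor (a representative sample,
   not an exhaustive list):

   x & 0  -> 0        x | 0  -> x        x * 1 -> x
   x & -1 -> x        x | -1 -> -1       x + 0 -> x  (when safe for FP)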

References dump_file, dump_flags, fold_real_zero_addition_p(), integer_all_onesp(), integer_onep(), integer_zerop(), operand_entry::op, operand_entry::rank, real_onep(), real_zerop(), and reassociate_stats.

Referenced by optimize_ops_list().

static void ensure_ops_are_available ( )
static
Ensure that operands in the OPS vector are available for STMT and all
   gimple statements on which STMT depends.   

References find_insert_point(), get_def_stmt(), gimple_assign_rhs1(), insert_stmt_after(), len, and operand_entry::op.

Referenced by rewrite_expr_tree().

static unsigned int execute_reassoc ( )
static
Gate and execute functions for Reassociation.   

References do_reassoc(), fini_reassoc(), init_reassoc(), and repropagate_negates().

static bool final_range_test_p ( )
static
Return true if STMT is a cast like:
   <bb N>:
   ...
   _123 = (int) _234;

   <bb M>:
   # _345 = PHI <_123(N), 1(...), 1(...)>
   where _234 has bool type, _123 has a single use and
   bb N has a single successor M.  This is commonly used in
   the last block of a range test.   

References edge_def::dest, edge_def::flags, flow_bb_inside_loop_p(), gimple_assign_cast_p(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_bb(), loop_containing_stmt(), single_imm_use(), single_succ_edge(), and single_succ_p().

Referenced by maybe_optimize_range_tests(), and suitable_cond_bb().

static gimple find_insert_point ( )
static
Find the statement after which STMT must be moved so that the
   dependency from DEP_STMT to STMT is maintained.   

References appears_later_in_bb(), gimple_uid(), and not_dominated_by().

Referenced by ensure_ops_are_available().

static long find_operand_rank ( )
inline static
Look up the operand rank structure for expression E.   

References pointer_map_contains().

Referenced by get_rank().

static void fini_reassoc ( )
static
Cleanup after the reassociation pass, and print stats if
   requested.   

References bb_rank, CDI_POST_DOMINATORS, cfun, free(), free_alloc_pool(), free_dominance_info(), loop_optimizer_finalize(), pointer_map_destroy(), reassociate_stats, and statistics_counter_event().

Referenced by execute_reassoc().

static bool gate_tree_ssa_reassoc ( )
static
static gimple get_def_stmt ( )
inline static
If OP is an SSA variable and is not the default definition, return the
   gimple statement that defines OP.  Otherwise return NULL.

Referenced by ensure_ops_are_available().

static bool get_ops ( tree  var,
enum tree_code  code,
vec< operand_entry_t > *  ops,
struct loop *  loop 
)
static
If VAR is set by CODE (BIT_{AND,IOR}_EXPR) which is reassociable,
   return true and fill in *OPS recursively.   

References operand_entry::count, gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_set_visited(), has_single_use(), operand_entry::id, is_reassociable_op(), operand_entry::op, pool_alloc(), and operand_entry::rank.

Referenced by maybe_optimize_range_tests().

static long get_rank ( tree  )
static
static int get_reassociation_width ( int  ops_num,
enum tree_code  opc,
enum machine_mode  mode 
)
static
Returns an optimal number of registers to use for computation of the
   given statements.

References get_required_cycles(), and targetm.

Referenced by reassociate_bb().

static int get_required_cycles ( )
static
Find out how many cycles we need to compute a chain of statements.
   OPS_NUM holds the number of statements in the chain.  CPU_WIDTH is
   the maximum number of independent statements we may execute per
   cycle.
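A rough standalone model of the idea (not the exact GCC formula; it
   only illustrates the width-limited pairwise reduction described
   above):

   static int
   approx_required_cycles (int ops_num, int cpu_width)
   {
     int cycles = 0;
     int pending = ops_num;   /* partial results still to combine  */

     while (pending > 1)
       {
         int reducible = pending / 2;   /* pairs we could combine ...  */
         if (reducible > cpu_width)
           reducible = cpu_width;       /* ... limited by issue width  */
         pending -= reducible;
         cycles++;
       }
     return cycles;
   }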

References exact_log2(), and floor_log2().

Referenced by get_reassociation_width().

static gimple get_single_immediate_use ( )
static
If LHS has a single immediate use that is a GIMPLE_ASSIGN statement, return
   it.  Otherwise, return NULL.   

References is_gimple_assign(), and single_imm_use().

Referenced by repropagate_negates(), and should_break_up_subtract().

static tree get_unary_op ( )
static
Given NAME, if NAME is defined by a unary operation OPCODE, return the
   operand of that operation.  Otherwise, return NULL.

References gimple_assign_rhs1(), gimple_assign_rhs_code(), and is_gimple_assign().

Referenced by eliminate_not_pairs(), eliminate_plus_minus_pair(), and repropagate_negates().

static void init_range_entry ( )
static
This is similar to make_range in fold-const.c, but on top of
   GIMPLE instead of trees.  If EXP is non-NULL, it should be
   an SSA_NAME and STMT argument is ignored, otherwise STMT
   argument should be a GIMPLE_COND.   

References build_int_cst(), exp(), range_entry::exp, gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_cond_code(), gimple_cond_lhs(), gimple_cond_rhs(), gimple_location(), range_entry::high, range_entry::in_p, is_gimple_assign(), range_entry::low, make_range_step(), and range_entry::strict_overflow_p.

Referenced by optimize_range_tests().

static void insert_operand_rank ( )
inline static
Insert {E,RANK} into the operand rank hashtable.   

References pointer_map_insert(), and rank().

Referenced by get_rank(), and init_reassoc().

static bool is_phi_for_stmt ( )
static
Return true if OPERAND is defined by a PHI node which uses the LHS
   of STMT in its operands.  This is also known as a "destructive
   update" operation.

References gimple_assign_lhs().

Referenced by swap_ops_for_binary_stmt().

static bool is_reassociable_op ( )
static
Return true if STMT is a reassociable operation containing a binary
   operation with tree code CODE, and is inside LOOP.

References flow_bb_inside_loop_p(), gimple_assign_lhs(), gimple_assign_rhs_code(), gimple_bb(), has_single_use(), and is_gimple_assign().

Referenced by get_ops(), linearize_expr(), linearize_expr_tree(), should_break_up_subtract(), and undistribute_ops_list().

static void linearize_expr_tree ( vec< operand_entry_t > *  ops,
gimple  stmt,
bool  is_associative,
bool  set_visited 
)
static
static bool loop_carried_phi ( )
static
If EXP is an SSA_NAME defined by a PHI statement that represents a
   loop-carried dependence of an innermost loop, return TRUE; else
   return FALSE.   

References bb_rank, gimple_bb(), basic_block_def::index, and phi_rank().

Referenced by propagate_rank().

gimple_opt_pass* make_pass_reassoc ( )
static tree negate_value ( )
static
Recursively negate the value of TONEGATE, and return the SSA_NAME
   representing the negated value.  Insertions of any necessary
   instructions go before GSI.
   This function is recursive in that, if you hand it "a_5" as the
   value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
   transform b_3 + b_4 into a_5 = -b_3 + -b_4.   
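Illustration (hypothetical SSA names):

   a_5 = b_3 + b_4;          /* before negating a_5  */

   t_6 = -b_3;
   t_7 = -b_4;
   a_5 = t_6 + t_7;          /* the negation is pushed to the leaves  */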

References force_gimple_operand_gsi(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_assign_set_rhs1(), gimple_assign_set_rhs2(), gsi_for_stmt(), GSI_SAME_STMT, has_single_use(), is_gimple_assign(), and update_stmt().

Referenced by break_up_subtract().

static bool no_side_effect_bb ( )
static
Return true if BB doesn't have side-effects that would disallow
   range test optimization, all SSA_NAMEs set in the bb are consumed
   in the bb and there are no PHIs.   

References gimple_assign_lhs(), gimple_assign_rhs_could_trap_p(), gimple_has_side_effects(), gimple_seq_empty_p(), gsi_end_p(), gsi_next(), gsi_start_bb(), gsi_stmt(), is_gimple_assign(), is_gimple_debug(), last, last_stmt(), and phi_nodes().

Referenced by maybe_optimize_range_tests().

static bool not_dominated_by ( )
inline static
Determine if stmt A is not dominated by stmt B.  If A and B are in the
   same basic block, then A's UID has to be less than B's.  If they are
   in different BBs, then A's BB must not be dominated by B's BB.
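A sketch consistent with this description, built from the referenced
   helpers (the actual definition may differ in detail):

   static inline bool
   not_dominated_by (gimple a, gimple b)
   {
     basic_block bb_a = gimple_bb (a);
     basic_block bb_b = gimple_bb (b);

     if (bb_a == bb_b)
       return gimple_uid (a) < gimple_uid (b);
     return !dominated_by_p (CDI_DOMINATORS, bb_a, bb_b);
   }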

References CDI_DOMINATORS, dominated_by_p(), gimple_bb(), and gimple_uid().

Referenced by find_insert_point(), and insert_stmt_after().

static int oecount_cmp ( )
static
Comparison function for qsort sorting oecount elements by count.   

References oecount_s::cnt, and oecount_s::id.

Referenced by undistribute_ops_list().

static void optimize_ops_list ( enum tree_code  opcode,
vec< operand_entry_t > *  ops 
)
static
Perform various identities and other optimizations on the list of
   operand entries, stored in OPS.  The tree code for the binary
   operation between all the operands is OPCODE.   

References add_to_ops_vec(), dump_file, dump_flags, eliminate_duplicate_pair(), eliminate_not_pairs(), eliminate_plus_minus_pair(), eliminate_redundant_comparison(), eliminate_using_constants(), is_gimple_min_invariant(), operand_entry::op, operand_entry::rank, reassociate_stats, and useless_type_conversion_p().

Referenced by reassociate_bb().

static void optimize_range_tests ( enum tree_code  opcode,
vec< operand_entry_t > *  ops 
)
static
Optimize range tests, similarly to how fold_range_test optimizes
   them on trees.  The tree code for the binary
   operation between all the operands is OPCODE.
   If OPCODE is ERROR_MARK, optimize_range_tests is called from within
   maybe_optimize_range_tests for inter-bb range optimization.
   In that case if oe->op is NULL, oe->id is bb->index whose
   GIMPLE_COND is && or ||ed into the test, and oe->rank says
   the actual opcode.   
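A typical source-level effect (illustrative only):

   if (a == 3 || a == 4 || a == 5)    /* three separate tests ...  */

   if ((unsigned) (a - 3) <= 2)       /* ... merged into one range check  */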

References build_int_cst(), exp(), first, range_entry::high, operand_entry::id, range_entry::idx, range_entry::in_p, init_range_entry(), integer_onep(), integer_zerop(), last_stmt(), range_entry::low, merge_ranges(), operand_entry::op, range_entry_cmp(), operand_entry::rank, range_entry::strict_overflow_p, tree_int_cst_equal(), type(), and update_range_test().

Referenced by maybe_optimize_range_tests(), and reassociate_bb().

static long phi_rank ( )
static
Rank assigned to a phi statement.  If STMT is a loop-carried phi of
   an innermost loop, and the phi has only a single use which is inside
   the loop, then the rank is the block rank of the loop latch plus an
   extra bias for the loop-carried dependence.  This causes expressions
   calculated into an accumulator variable to be independent for each
   iteration of the loop.  If STMT is some other phi, the rank is the
   block rank of its containing block.   

References bb_rank, gimple_bb(), gimple_phi_arg_def(), gimple_phi_num_args(), gimple_phi_result(), loop::header, basic_block_def::index, loop::inner, loop::latch, basic_block_def::loop_father, single_imm_use(), and virtual_operand_p().

Referenced by get_rank(), and loop_carried_phi().

static void propagate_op_to_single_use ( )
static
Find the single immediate use of STMT's LHS, and replace it
   with OP.  Remove STMT.  If STMT's LHS is the same as *DEF,
   replace *DEF with OP as well.   

References gimple_assign_lhs(), gimple_call_lhs(), gsi_for_stmt(), gsi_remove(), has_single_use(), is_gimple_call(), release_defs(), single_imm_use(), unlink_stmt_vdef(), and update_stmt().

Referenced by zero_one_operation().

static long propagate_rank ( )
static
Return the maximum of RANK and the rank that should be propagated
   from expression OP.  For most operands, this is just the rank of OP.
   For loop-carried phis, the value is zero to avoid undoing the bias
   in favor of the phi.   

References get_rank(), loop_carried_phi(), and rank().

Referenced by get_rank().

static int range_entry_cmp ( )
static
Comparison function for qsort.  Sort entries
   without SSA_NAME exp first, then with SSA_NAMEs sorted
   by increasing SSA_NAME_VERSION, and for the same SSA_NAMEs
   by increasing ->low and if ->low is the same, by increasing
   ->high.  ->low == NULL_TREE means minimum, ->high == NULL_TREE
   means maximum.

References range_entry::exp, range_entry::high, range_entry::idx, integer_onep(), and range_entry::low.

Referenced by optimize_range_tests().

static void remove_visited_stmt_chain ( )
static
static void rewrite_expr_tree ( gimple  stmt,
unsigned int  opindex,
vec< operand_entry_t > ops,
bool  moved 
)
static
Recursively rewrite our linearized statements so that the operators
   match those in OPS[OPINDEX], putting the computation in rank
   order.   

References dump_file, dump_flags, ensure_ops_are_available(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_set_rhs1(), gimple_assign_set_rhs2(), operand_entry::op, print_gimple_stmt(), remove_visited_stmt_chain(), and update_stmt().

Referenced by reassociate_bb().

static void rewrite_expr_tree_parallel ( gimple  stmt,
int  width,
vec< operand_entry_t > ops 
)
static
Recursively rewrite our linearized statements so that the operators
   match those in OPS[OPINDEX], putting the computation in rank
   order and trying to allow operations to be executed in
   parallel.   

References build_and_add_sum(), dump_file, dump_flags, gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_assign_set_rhs1(), gimple_assign_set_rhs2(), print_gimple_stmt(), remove_visited_stmt_chain(), swap_ops_for_binary_stmt(), and update_stmt().

Referenced by reassociate_bb().

static bool should_break_up_subtract ( )
static
Return true if we should break up the subtract in STMT into an add
   with negate.  This is true when the subtract operands are really
   adds, or the subtract itself is used in an add expression.  In
   either case, breaking up the subtract into an add with negate
   exposes the adds to reassociation.
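Both situations, sketched with hypothetical names:

   x = a + b;
   y = x - c;      /* an operand of the subtract is really an add  */

   p = q - r;
   s = p + t;      /* the subtract itself feeds an add  */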

References get_single_immediate_use(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), is_gimple_assign(), is_reassociable_op(), and loop_containing_stmt().

Referenced by break_up_subtract_bb().

static int sort_by_operand_rank ( )
static
qsort comparison function to sort operand entries PA and PB by rank
   so that the sorted array is ordered by rank in decreasing order.   

References constant_type(), operand_entry::id, operand_entry::op, and operand_entry::rank.

Referenced by attempt_builtin_powi(), and reassociate_bb().

static bool stmt_is_power_of_op ( )
static
Return TRUE iff STMT represents a builtin call that raises OP
   to some exponent.   

References BUILT_IN_NORMAL, gimple_call_arg(), gimple_call_fndecl(), is_gimple_call(), and operand_equal_p().

Referenced by zero_one_operation().

static bool suitable_cond_bb ( basic_block  bb,
basic_block  test_bb,
basic_block other_bb,
bool  backward 
)
static
Return true if BB is a suitable basic block for inter-bb range test
   optimization.  If BACKWARD is true, BB should be the only predecessor
   of TEST_BB, and *OTHER_BB is either NULL and filled in by the routine,
   or is compared against to find a common basic block to which all
   conditions branch if true resp. false.  If BACKWARD is false, TEST_BB
   should be the only predecessor of BB.

References edge_def::dest, edge_def::dest_idx, final_range_test_p(), find_edge(), edge_def::flags, gimple_assign_lhs(), gimple_phi_arg_def(), gimple_visited_p(), gsi_end_p(), gsi_next(), gsi_start_phis(), gsi_stmt(), integer_onep(), integer_zerop(), last_stmt(), operand_equal_p(), single_succ(), stmt_could_throw_p(), and basic_block_def::succs.

Referenced by maybe_optimize_range_tests().

static void swap_ops_for_binary_stmt ( vec< operand_entry_t > ops,
unsigned int  opindex,
gimple  stmt 
)
static
This function checks three consecutive operands in
   the passed operands vector OPS starting from OPINDEX and
   swaps two operands if it is profitable for the binary operation
   consuming the OPINDEX + 1 and OPINDEX + 2 operands.

   We pair ops with the same rank if possible.

   The alternative we try is to see if STMT is a destructive
   update style statement, which is like:
   b = phi (a, ...)
   a = c + b;
   In that case, we want to use the destructive update form to
   expose the possible vectorizer sum reduction opportunity.
   In that case, the third operand will be the phi node. This
   check is not performed if STMT is null.

   We could, of course, try to be better as noted above, and do a
   lot of work to try to find these opportunities in >3 operand
   cases, but it is unlikely to be worth it.   

References is_phi_for_stmt(), operand_entry::op, and operand_entry::rank.

Referenced by reassociate_bb(), and rewrite_expr_tree_parallel().

static void transform_stmt_to_copy ( )
static
Transform STMT at *GSI into a copy by replacing its rhs with NEW_RHS.   

References dump_file, dump_flags, gimple_assign_rhs1(), gimple_assign_set_rhs_from_tree(), print_gimple_stmt(), remove_visited_stmt_chain(), and update_stmt().

Referenced by reassociate_bb().

static void transform_stmt_to_multiply ( gimple_stmt_iterator gsi,
gimple  stmt,
tree  rhs1,
tree  rhs2 
)
static
Transform STMT at *GSI into a multiply of RHS1 and RHS2.   

References dump_file, dump_flags, gimple_assign_set_rhs_with_ops(), gsi_stmt(), print_gimple_stmt(), remove_visited_stmt_chain(), and update_stmt().

Referenced by reassociate_bb().

static bool undistribute_ops_list ( enum tree_code  opcode,
vec< operand_entry_t > *  ops,
struct loop *  loop 
)
static
Perform un-distribution of divisions and multiplications.
   A * X + B * X is transformed into (A + B) * X and A / X + B / X
   to (A + B) / X for real X.

   The algorithm is organized as follows.

    - First we walk the addition chain *OPS looking for summands that
      are defined by a multiplication or a real division.  This results
      in the candidates bitmap with relevant indices into *OPS.

    - Second we build the chains of multiplications or divisions for
      these candidates, counting the number of occurrences of (operand, code)
      pairs in all of the candidates chains.

    - Third we sort the (operand, code) pairs by number of occurrence and
      process them starting with the pair with the most uses.

      * For each such pair we walk the candidates again to build a
        second candidate bitmap noting all multiplication/division chains
        that have at least one occurrence of (operand, code).

      * We build an alternate addition chain only covering these
        candidates with one (operand, code) operation removed from their
        multiplication/division chain.

      * The first candidate gets replaced by the alternate addition chain
        multiplied/divided by the operand.

      * All candidate chains get disabled for further processing and
        processing of (operand, code) pairs continues.

  The alternate addition chains built are re-processed by the main
  reassociation algorithm which allows optimizing a * x * y + b * y * x
  to (a + b) * x * y in one invocation of the reassociation pass.
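  Source-level effect, illustratively:

   t = a * x + b * x + c * x;

   becomes

   t = (a + b + c) * x;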

References associative_tree_code(), bitmap_clear(), bitmap_first_set_bit(), bitmap_set_bit(), build_and_add_sum(), build_zero_cst(), candidates, changed, oecount_s::cnt, hash_table< Descriptor, Allocator >::create(), hash_table< Descriptor, Allocator >::dispose(), dump_file, dump_flags, hash_table< Descriptor, Allocator >::find_slot(), first, free(), get_rank(), gimple_assign_lhs(), gimple_assign_rhs_code(), gimple_get_lhs(), oecount_s::id, is_gimple_assign(), is_reassociable_op(), linearize_expr_tree(), oecount_s::oecode, oecount_cmp(), operand_entry::op, oecount_s::op, print_generic_expr(), operand_entry::rank, sbitmap_alloc(), sbitmap_free(), and zero_one_operation().

Referenced by reassociate_bb().

static bool update_range_test ( struct range_entry range,
struct range_entry otherrange,
unsigned int  count,
enum tree_code  opcode,
vec< operand_entry_t > *  ops,
tree  exp,
bool  in_p,
tree  low,
tree  high,
bool  strict_overflow_p 
)
static
Helper routine of optimize_range_tests.
   [EXP, IN_P, LOW, HIGH, STRICT_OVERFLOW_P] is a merged range for
   RANGE and OTHERRANGE through OTHERRANGE + COUNT - 1 ranges,
   OPCODE and OPS are arguments of optimize_range_tests.  Return
   true if the range merge has been successful.
   If OPCODE is ERROR_MARK, this is called from within
   maybe_optimize_range_tests and is performing inter-bb range optimization.
   Changes should then be performed right away, and whether an op is
   BIT_AND_EXPR or BIT_IOR_EXPR is found in oe->rank.

References build_int_cst(), build_range_check(), count, dump_file, dump_flags, exp(), range_entry::exp, fold_convert_loc(), force_gimple_operand_gsi(), gimple_assign_cast_p(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_assign_set_rhs_with_ops(), gimple_cond_make_false(), gimple_cond_make_true(), gimple_cond_set_code(), gimple_cond_set_lhs(), gimple_cond_set_rhs(), gimple_location(), gsi_for_stmt(), GSI_SAME_STMT, range_entry::high, operand_entry::id, range_entry::idx, range_entry::in_p, invert_truthvalue_loc(), is_gimple_assign(), is_gimple_debug(), last_stmt(), range_entry::low, operand_entry::op, print_generic_expr(), operand_entry::rank, range_entry::strict_overflow_p, update_stmt(), WARN_STRICT_OVERFLOW_COMPARISON, and warning_at().

Referenced by optimize_range_tests().

static void zero_one_operation ( )
static
Walks the linear chain with result *DEF searching for an operation
   with operand OP and code OPCODE, removing that from the chain.  *DEF
   is updated if there is only one operand but no operation left.

References decrement_power(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), has_single_use(), propagate_op_to_single_use(), and stmt_is_power_of_op().

Referenced by undistribute_ops_list().


Variable Documentation

long* bb_rank
static
Starting rank number for a given basic block, so that we can rank
   operations using unmovable instructions in that BB based on the bb
   depth.   

Referenced by fini_reassoc(), get_rank(), init_reassoc(), loop_carried_phi(), and phi_rank().

vec<oecount> cvec
static
The heap for the oecount hashtable and the sorted list of operands.   
int next_operand_entry_id
static
This is used to assign a unique ID to each struct operand_entry
   so that qsort results are identical on different hosts.   

Referenced by add_repeat_to_ops_vec(), add_to_ops_vec(), and init_reassoc().

alloc_pool operand_entry_pool
static
struct pointer_map_t* operand_rank
static
Operand->rank hashtable.   
vec<tree> plus_negates
static
struct { ... } reassociate_stats
Reassociation for trees.

Copyright (C) 2005-2013 Free Software Foundation, Inc. Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see http://www.gnu.org/licenses/.

This is a simple global reassociation pass.  It is, in part, based
    on the LLVM pass of the same name (They do some things more/less
    than we do, in different orders, etc).

    It consists of five steps:

    1. Breaking up subtract operations into addition + negate, where
    it would promote the reassociation of adds.

    2. Left linearization of the expression trees, so that (A+B)+(C+D)
    becomes (((A+B)+C)+D), which is easier for us to rewrite later.
    During linearization, we place the operands of the binary
    expressions into a vector of operand_entry_t

    3. Optimization of the operand lists, eliminating things like a +
    -a, a & a, etc.

    3a. Combine repeated factors with the same occurrence counts
    into a __builtin_powi call that will later be optimized into
    an optimal number of multiplies.

    4. Rewrite the expression trees we linearized and optimized so
    they are in proper rank order.

    5. Repropagate negates, as nothing else will clean it up ATM.

    A bit of theory on #4, since nobody seems to write anything down
    about why it makes sense to do it the way they do it:

    We could do this much nicer theoretically, but don't (for reasons
    explained after how to do it theoretically nice :P).

    In order to promote the most redundancy elimination, you want
    binary expressions whose operands are the same rank (or
    preferably, the same value) exposed to the redundancy eliminator,
    for possible elimination.

    So the way to do this if we really cared, is to build the new op
    tree from the leaves to the roots, merging as you go, and putting the
    new op on the end of the worklist, until you are left with one
    thing on the worklist.

    IE if you have to rewrite the following set of operands (listed with
    rank in parentheses), with opcode PLUS_EXPR:

    a (1),  b (1),  c (1),  d (2), e (2)


    We start with our merge worklist empty, and the ops list with all of
    those on it.

    You want to first merge all leaves of the same rank, as much as
    possible.

    So first build a binary op of

    mergetmp = a + b, and put "mergetmp" on the merge worklist.

    Because there is no three operand form of PLUS_EXPR, c is not going to
    be exposed to redundancy elimination as a rank 1 operand.

    So you might as well throw it on the merge worklist (you could also
    consider it to now be a rank two operand, and merge it with d and e,
    but in this case, you then have evicted e from a binary op. So at
    least in this situation, you can't win.)

    Then build a binary op of d + e
    mergetmp2 = d + e

    and put mergetmp2 on the merge worklist.

    so merge worklist = {mergetmp, c, mergetmp2}

    Continue building binary ops of these operations until you have only
    one operation left on the worklist.

    So we have

    build binary op
    mergetmp3 = mergetmp + c

    worklist = {mergetmp2, mergetmp3}

    mergetmp4 = mergetmp2 + mergetmp3

    worklist = {mergetmp4}

    because we have one operation left, we can now just set the original
    statement equal to the result of that operation.

    This will at least expose a + b  and d + e to redundancy elimination
    as binary operations.

    For extra points, you can reuse the old statements to build the
    mergetmps, since you shouldn't run out.

    So why don't we do this?

    Because it's expensive, and rarely will help.  Most trees we are
    reassociating have 3 or fewer ops.  If they have 2 ops, they already
    will be written into a nice single binary op.  If you have 3 ops, a
    single simple check suffices to tell you whether the first two are of the
    same rank.  If so, you know to order it

    mergetmp = op1 + op2
    newstmt = mergetmp + op3

    instead of
    mergetmp = op2 + op3
    newstmt = mergetmp + op1

    If all three are of the same rank, you can't expose them all in a
    single binary operator anyway, so the above is *still* the best you
    can do.

    Thus, this is what we do.  When we have three ops left, we check to see
    what order to put them in, and call it a day.  As a nod to vector sum
    reduction, we check if any of the ops are really a phi node that is a
    destructive update for the associating op, and keep the destructive
    update together for vector sum reduction recognition.   
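    For concreteness, a self-contained sketch of the "theoretically
    nice" worklist scheme above, on named operands with ranks
    (illustrative only; as explained, the pass itself settles for the
    cheaper three-operand check):

    #include <stdio.h>
    #include <string.h>

    struct ent { char name[32]; long rank; };

    int
    main (void)
    {
      /* a(1) b(1) c(1) d(2) e(2), as in the example above.  */
      struct ent wl[16] = {
        { "a", 1 }, { "b", 1 }, { "c", 1 }, { "d", 2 }, { "e", 2 }
      };
      int n = 5, ntmp = 0;

      while (n > 1)
        {
          int i, j;
          struct ent m;

          /* Prefer merging a pair of equal-rank entries; otherwise
             just merge the two entries at the front.  */
          for (i = 0; i + 1 < n; i++)
            if (wl[i].rank == wl[i + 1].rank)
              break;
          if (i + 1 >= n)
            i = 0;
          j = i + 1;

          snprintf (m.name, sizeof m.name, "mergetmp%d", ++ntmp);
          m.rank = (wl[i].rank > wl[j].rank ? wl[i].rank : wl[j].rank) + 1;
          printf ("%s = %s + %s\n", m.name, wl[i].name, wl[j].name);

          /* Remove entries i and j; append the merged entry.  */
          memmove (&wl[j], &wl[j + 1], (n - j - 1) * sizeof wl[0]);
          memmove (&wl[i], &wl[i + 1], (n - i - 2) * sizeof wl[0]);
          n -= 2;
          wl[n++] = m;
        }

      printf ("result = %s\n", wl[0].name);
      return 0;
    }

    On the example operands this prints the same merge sequence the
    walkthrough above derives by hand, ending in mergetmp4.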
Statistics  

Referenced by add_repeat_to_ops_vec(), attempt_builtin_powi(), eliminate_duplicate_pair(), eliminate_not_pairs(), eliminate_plus_minus_pair(), eliminate_redundant_comparison(), eliminate_using_constants(), fini_reassoc(), init_reassoc(), linearize_expr(), and optimize_ops_list().

vec<repeat_factor> repeat_factor_vec
static