GCC Middle and Back End API Reference
expr.h File Reference

Data Structures

struct  args_size
struct  locate_and_pad_arg_data
struct  separate_ops

Typedefs

typedef struct separate_ops * sepops

Enumerations

enum  expand_modifier {
  EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM, EXPAND_CONST_ADDRESS,
  EXPAND_INITIALIZER, EXPAND_WRITE, EXPAND_MEMORY
}
enum  direction { none, upward, downward }
enum  optab_methods {
  OPTAB_DIRECT, OPTAB_LIB, OPTAB_WIDEN, OPTAB_LIB_WIDEN,
  OPTAB_MUST_WIDEN
}
enum  block_op_methods { BLOCK_OP_NORMAL, BLOCK_OP_NO_LIBCALL, BLOCK_OP_CALL_PARM, BLOCK_OP_TAILCALL }
enum  save_level { SAVE_BLOCK, SAVE_FUNCTION, SAVE_NONLOCAL }

Functions

rtx expand_simple_binop (enum machine_mode, enum rtx_code, rtx, rtx, rtx, int, enum optab_methods)
rtx expand_simple_unop (enum machine_mode, enum rtx_code, rtx, rtx, int)
int have_insn_for (enum rtx_code, enum machine_mode)
void emit_libcall_block (rtx, rtx, rtx, rtx)
rtx gen_add2_insn (rtx, rtx)
rtx gen_add3_insn (rtx, rtx, rtx)
rtx gen_sub2_insn (rtx, rtx)
rtx gen_sub3_insn (rtx, rtx, rtx)
rtx gen_move_insn (rtx, rtx)
int have_add2_insn (rtx, rtx)
int have_sub2_insn (rtx, rtx)
void emit_cmp_and_jump_insns (rtx, rtx, enum rtx_code, rtx, enum machine_mode, int, rtx, int prob=-1)
void emit_indirect_jump (rtx)
rtx gen_cond_trap (enum rtx_code, rtx, rtx, rtx)
rtx emit_conditional_move (rtx, enum rtx_code, rtx, rtx, enum machine_mode, rtx, rtx, enum machine_mode, int)
int can_conditionally_move_p (enum machine_mode mode)
rtx emit_conditional_add (rtx, enum rtx_code, rtx, rtx, enum machine_mode, rtx, rtx, enum machine_mode, int)
rtx expand_sync_operation (rtx, rtx, enum rtx_code)
rtx expand_sync_fetch_operation (rtx, rtx, enum rtx_code, bool, rtx)
rtx expand_sync_lock_test_and_set (rtx, rtx, rtx)
rtx expand_atomic_exchange (rtx, rtx, rtx, enum memmodel)
rtx expand_atomic_load (rtx, rtx, enum memmodel)
rtx expand_atomic_store (rtx, rtx, enum memmodel, bool)
rtx expand_atomic_fetch_op (rtx, rtx, rtx, enum rtx_code, enum memmodel, bool)
rtx expand_atomic_test_and_set (rtx, rtx, enum memmodel)
rtx expand_atomic_clear (rtx, enum memmodel)
void expand_atomic_thread_fence (enum memmodel)
void expand_atomic_signal_fence (enum memmodel)
rtx negate_rtx (enum machine_mode, rtx)
rtx expand_and (enum machine_mode, rtx, rtx, rtx)
rtx emit_store_flag (rtx, enum rtx_code, rtx, rtx, enum machine_mode, int, int)
rtx emit_store_flag_force (rtx, enum rtx_code, rtx, rtx, enum machine_mode, int, int)
unsigned HOST_WIDE_INT choose_multiplier (unsigned HOST_WIDE_INT, int, int, unsigned HOST_WIDE_INT *, int *, int *)
rtx expand_builtin (tree, rtx, rtx, enum machine_mode, int)
tree std_build_builtin_va_list (void)
tree std_fn_abi_va_list (tree)
tree std_canonical_va_list_type (tree)
void std_expand_builtin_va_start (tree, rtx)
rtx default_expand_builtin (tree, rtx, rtx, enum machine_mode, int)
void expand_builtin_setjmp_setup (rtx, rtx)
void expand_builtin_setjmp_receiver (rtx)
rtx expand_builtin_saveregs (void)
void expand_builtin_trap (void)
rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode)
void init_expr_target (void)
void init_expr (void)
void convert_move (rtx, rtx, int)
rtx convert_to_mode (enum machine_mode, rtx, int)
rtx convert_modes (enum machine_mode, enum machine_mode, rtx, int)
void init_block_move_fn (const char *)
void init_block_clear_fn (const char *)
rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods)
rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool)
rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods, unsigned int, HOST_WIDE_INT)
bool emit_storent_insn (rtx to, rtx from)
void move_block_to_reg (int, rtx, int, enum machine_mode)
void move_block_from_reg (int, rtx, int)
rtx gen_group_rtx (rtx)
void emit_group_load (rtx, rtx, tree, int)
rtx emit_group_load_into_temps (rtx, rtx, tree, int)
void emit_group_move (rtx, rtx)
rtx emit_group_move_into_temps (rtx)
void emit_group_store (rtx, rtx, tree, int)
rtx maybe_emit_group_store (rtx, tree)
void copy_blkmode_from_reg (rtx, rtx, tree)
void use_reg_mode (rtx *, rtx, enum machine_mode)
rtx copy_blkmode_to_reg (enum machine_mode, tree)
static void use_reg (rtx *, rtx)
void use_regs (rtx *, int, int)
void use_group_regs (rtx *, rtx)
rtx clear_storage (rtx, rtx, enum block_op_methods)
rtx clear_storage_hints (rtx, rtx, enum block_op_methods, unsigned int, HOST_WIDE_INT)
rtx set_storage_via_libcall (rtx, rtx, rtx, bool)
bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int, unsigned int, HOST_WIDE_INT)
unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT, unsigned int, unsigned int)
int can_store_by_pieces (unsigned HOST_WIDE_INT, rtx(*)(void *, HOST_WIDE_INT, enum machine_mode), void *, unsigned int, bool)
rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT, rtx(*)(void *, HOST_WIDE_INT, enum machine_mode), void *, unsigned int, bool, int)
rtx emit_move_insn (rtx, rtx)
rtx emit_move_insn_1 (rtx, rtx)
rtx emit_move_complex_push (enum machine_mode, rtx, rtx)
rtx emit_move_complex_parts (rtx, rtx)
rtx push_block (rtx, int, int)
void emit_push_insn (rtx, enum machine_mode, tree, rtx, unsigned int, int, rtx, int, rtx, rtx, int, rtx)
void expand_assignment (tree, tree, bool)
rtx store_expr (tree, rtx, int, bool)
rtx force_operand (rtx, rtx)
rtx expand_expr_real (tree, rtx, enum machine_mode, enum expand_modifier, rtx *)
rtx expand_expr_real_1 (tree, rtx, enum machine_mode, enum expand_modifier, rtx *)
rtx expand_expr_real_2 (sepops, rtx, enum machine_mode, enum expand_modifier)
static rtx expand_expr (tree exp, rtx target, enum machine_mode mode, enum expand_modifier modifier)
static rtx expand_normal (tree)
void init_pending_stack_adjust (void)
void discard_pending_stack_adjust (void)
void clear_pending_stack_adjust (void)
void do_pending_stack_adjust (void)
tree string_constant (tree, tree *)
void jumpifnot (tree, rtx, int)
void jumpifnot_1 (enum tree_code, tree, tree, rtx, int)
void jumpif (tree, rtx, int)
void jumpif_1 (enum tree_code, tree, tree, rtx, int)
void do_jump (tree, rtx, rtx, int)
void do_jump_1 (enum tree_code, tree, tree, rtx, rtx, int)
void do_compare_rtx_and_jump (rtx, rtx, enum rtx_code, int, enum machine_mode, rtx, rtx, rtx, int)
int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx, int)
int try_tablejump (tree, tree, tree, tree, rtx, rtx, int)
rtx expr_size (tree)
HOST_WIDE_INT int_expr_size (tree)
rtx hard_function_value (const_tree, const_tree, const_tree, int)
rtx prepare_call_address (tree, rtx, rtx, rtx *, int, int)
bool shift_return_value (enum machine_mode, bool, rtx)
rtx expand_call (tree, rtx, int)
void fixup_tail_calls (void)
rtx expand_variable_shift (enum tree_code, enum machine_mode, rtx, tree, rtx, int)
rtx expand_shift (enum tree_code, enum machine_mode, rtx, int, rtx, int)
rtx expand_divmod (int, enum tree_code, enum machine_mode, rtx, rtx, rtx, int)
void locate_and_pad_parm (enum machine_mode, tree, int, int, tree, struct args_size *, struct locate_and_pad_arg_data *)
rtx label_rtx (tree)
rtx force_label_rtx (tree)
rtx eliminate_constant_term (rtx, rtx *)
rtx memory_address_addr_space (enum machine_mode, rtx, addr_space_t)
rtx change_address (rtx, enum machine_mode, rtx)
rtx adjust_address_1 (rtx, enum machine_mode, HOST_WIDE_INT, int, int, int, HOST_WIDE_INT)
rtx adjust_automodify_address_1 (rtx, enum machine_mode, rtx, HOST_WIDE_INT, int)
rtx offset_address (rtx, rtx, unsigned HOST_WIDE_INT)
rtx widen_memory_access (rtx, enum machine_mode, HOST_WIDE_INT)
rtx validize_mem (rtx)
rtx use_anchored_address (rtx)
void set_mem_attributes (rtx, tree, int)
void set_mem_attributes_minus_bitpos (rtx, tree, int, HOST_WIDE_INT)
int get_mem_align_offset (rtx, unsigned int)
rtx assemble_trampoline_template (void)
rtx copy_to_reg (rtx)
rtx copy_addr_to_reg (rtx)
rtx copy_to_mode_reg (enum machine_mode, rtx)
rtx copy_to_suggested_reg (rtx, rtx, enum machine_mode)
rtx force_reg (enum machine_mode, rtx)
rtx force_not_mem (rtx)
enum machine_mode promote_function_mode (const_tree, enum machine_mode, int *, const_tree, int)
enum machine_mode promote_mode (const_tree, enum machine_mode, int *)
enum machine_mode promote_decl_mode (const_tree, int *)
void adjust_stack (rtx)
void anti_adjust_stack (rtx)
void anti_adjust_stack_and_probe (rtx, bool)
void emit_stack_save (enum save_level, rtx *)
void emit_stack_restore (enum save_level, rtx)
void update_nonlocal_goto_save_area (void)
rtx allocate_dynamic_stack_space (rtx, unsigned, unsigned, bool)
void emit_stack_probe (rtx)
void probe_stack_range (HOST_WIDE_INT, rtx)
rtx hard_libcall_value (enum machine_mode, rtx)
void store_bit_field (rtx, unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT, enum machine_mode, rtx)
rtx extract_bit_field (rtx, unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT, int, bool, rtx, enum machine_mode, enum machine_mode)
rtx extract_low_bits (enum machine_mode, enum machine_mode, rtx)
rtx expand_mult (enum machine_mode, rtx, rtx, rtx, int)
rtx expand_mult_highpart_adjust (enum machine_mode, rtx, rtx, rtx, rtx, int)
rtx assemble_static_space (unsigned HOST_WIDE_INT)
int safe_from_p (const_rtx, tree, int)
bool split_comparison (enum rtx_code, enum machine_mode, enum rtx_code *, enum rtx_code *)
void init_optabs (void)
void init_all_optabs (struct target_optabs *)
rtx init_one_libfunc (const char *)
rtx set_user_assembler_libfunc (const char *, const char *)
tree build_libfunc_function (const char *)
rtx get_personality_function (tree)
void expand_case (gimple)
void expand_sjlj_dispatch_table (rtx, vec< tree >)

Variables

tree block_clear_fn

Typedef Documentation

typedef struct separate_ops * sepops
This structure is used to pass around information about exploded
   unary, binary and trinary expressions between expand_expr_real_1 and
   friends.   

Enumeration Type Documentation

enum block_op_methods
Emit code to move a block Y to a block X.
Enumerator:
BLOCK_OP_NORMAL 
BLOCK_OP_NO_LIBCALL 
BLOCK_OP_CALL_PARM 
BLOCK_OP_TAILCALL 
enum direction
Enumerator:
none 
upward 
downward 
enum expand_modifier
This is the 4th arg to `expand_expr'.
   EXPAND_STACK_PARM means we are possibly expanding a call param onto
   the stack.
   EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
   EXPAND_INITIALIZER is similar but also record any labels on forced_labels.
   EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
    is a constant that is not a legitimate address.
   EXPAND_WRITE means we are only going to write to the resulting rtx.
   EXPAND_MEMORY means we are interested in a memory result, even if
    the memory is constant and we could have propagated a constant value.   
Enumerator:
EXPAND_NORMAL 
EXPAND_STACK_PARM 
EXPAND_SUM 
EXPAND_CONST_ADDRESS 
EXPAND_INITIALIZER 
EXPAND_WRITE 
EXPAND_MEMORY 
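
A minimal sketch, not taken from the GCC sources, of how the modifier is
passed as the fourth argument of expand_expr; the helper name is
hypothetical and EXP is assumed to be an operand the caller already has.

/* Illustrative only: expand EXP for its value with no target or mode
   suggestion.  This is the EXPAND_NORMAL case that the expand_normal
   wrapper provides as a shorthand.  */
static rtx
expand_operand_for_value (tree exp)
{
  return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
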
enum optab_methods
Functions from optabs.c, commonly used, and without need for the optabs
   tables:   
Passed to expand_simple_binop and expand_binop to say which options
   to try to use if the requested operation can't be open-coded on the
   requisite mode.  Either OPTAB_LIB or OPTAB_LIB_WIDEN says try using
   a library call.  Either OPTAB_WIDEN or OPTAB_LIB_WIDEN says try
   using a wider mode.  OPTAB_MUST_WIDEN says try widening and don't
   try anything else.   
Enumerator:
OPTAB_DIRECT 
OPTAB_LIB 
OPTAB_WIDEN 
OPTAB_LIB_WIDEN 
OPTAB_MUST_WIDEN 
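
A hedged sketch of how an optab_methods value reaches expand_simple_binop;
the wrapper function and the choice of an unsigned SImode addition are
illustrative assumptions, not code from the GCC sources.

/* Illustrative only: add two SImode operands, letting the expander widen
   the operation or fall back to a library call if SImode addition cannot
   be open-coded on the target.  */
static rtx
expand_add_with_fallback (rtx op0, rtx op1)
{
  rtx target = gen_reg_rtx (SImode);
  rtx result = expand_simple_binop (SImode, PLUS, op0, op1, target,
                                    1 /* unsignedp */, OPTAB_LIB_WIDEN);
  /* The expander may ignore TARGET, so always use the returned rtx.  */
  return result;
}
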
enum save_level
This enum is used for the following two functions.   
Enumerator:
SAVE_BLOCK 
SAVE_FUNCTION 
SAVE_NONLOCAL 

Function Documentation

rtx adjust_address_1 ( rtx  memref,
enum machine_mode  mode,
HOST_WIDE_INT  offset,
int  validate,
int  adjust_address,
int  adjust_object,
HOST_WIDE_INT  size 
)
Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.   

References mem_attrs::addrspace, mem_attrs::alias, mem_attrs::align, change_address_1(), copy_rtx(), mem_attrs::expr, get_address_mode(), get_mem_attrs(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT, memory_address_addr_space_p(), mem_attrs::offset, offset, mem_attrs::offset_known_p, plus_constant(), set_mem_attrs(), shift, mem_attrs::size, mem_attrs::size_known_p, targetm, and trunc_int_for_mode().

Referenced by adjust_automodify_address_1(), and widen_memory_access().
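
A sketch of a direct call, assuming MEMREF is a valid MEM rtx; in the GCC
sources this function is usually reached through convenience macros such
as adjust_address rather than called directly, so treat the literal
argument values here as illustrative.

/* Illustrative only: take a SImode view of MEMREF at byte offset 4,
   validating the new address and updating both the address and the
   MEM attributes.  */
static rtx
adjust_memref_example (rtx memref)
{
  return adjust_address_1 (memref, SImode, 4,
                           1 /* validate */,
                           1 /* adjust_address */,
                           0 /* adjust_object */,
                           0 /* size: implied by SImode */);
}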

rtx adjust_automodify_address_1 ( rtx  memref,
enum machine_mode  mode,
rtx  addr,
HOST_WIDE_INT  offset,
int  validate 
)
Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.   

References adjust_address_1(), and change_address_1().

void adjust_stack ( rtx  )
Remove some bytes from the stack.  An rtx says how many.   
rtx allocate_dynamic_stack_space ( rtx  size,
unsigned  size_align,
unsigned  required_align,
bool  cannot_accumulate 
)
Allocate some space on the stack dynamically and return its address.   
Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted 
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.   

References anti_adjust_stack(), anti_adjust_stack_and_probe(), function::calls_alloca, cfun, convert_to_mode(), create_convert_operand_to(), create_fixed_operand(), do_pending_stack_adjust(), emit_barrier(), emit_cmp_and_jump_insns(), emit_insn(), emit_jump(), emit_label(), emit_library_call_value(), emit_move_insn(), error(), expand_binop(), expand_divmod(), expand_insn(), expand_mult(), find_reg_equal_equiv_note(), force_operand(), gen_label_rtx(), gen_reg_rtx(), GENERIC_STACK_CHECK, get_last_insn(), HOST_BITS_PER_INT, HOST_WIDE_INT, init_one_libfunc(), LCT_NORMAL, mark_reg_pointer(), function::nonlocal_goto_save_area, OPTAB_LIB_WIDEN, OPTAB_WIDEN, plus_constant(), probe_stack_range(), round_push(), rtx_equal_p(), stack_limit_rtx, STATIC_BUILTIN_STACK_CHECK, suppress_reg_args_size, and update_nonlocal_goto_save_area().

Referenced by expand_builtin_alloca(), expand_builtin_apply(), expand_call(), expand_stack_vars(), and initialize_argument_information().
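
A minimal sketch of an alloca-style use, assuming SIZE is an rtx holding
the byte count; passing false for CANNOT_ACCUMULATE is the always-safe
choice described above, and the alignment values are illustrative.

/* Illustrative only: allocate SIZE bytes of stack space at run time,
   aligned to BIGGEST_ALIGNMENT, and return the address.  */
static rtx
expand_alloca_example (rtx size)
{
  return allocate_dynamic_stack_space (size,
                                       0 /* size_align: unknown */,
                                       BIGGEST_ALIGNMENT,
                                       false /* cannot_accumulate */);
}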

void anti_adjust_stack ( rtx  )
Add some bytes to the stack.  An rtx says how many.   
void anti_adjust_stack_and_probe ( rtx  ,
bool   
)
Add some bytes to the stack while probing it.  An rtx says how many.  
rtx assemble_static_space ( unsigned  HOST_WIDE_INT)

Referenced by expand_function_start().

rtx assemble_trampoline_template ( void  )
Assemble the static constant template for function entry trampolines.   

References asm_out_file, floor_log2(), gen_const_mem(), initial_trampoline, set_mem_align(), set_mem_size(), switch_to_section(), and targetm.

tree build_libfunc_function ( const char *  )
Build a decl for a libfunc named NAME.  

Referenced by init_one_libfunc().

rtx builtin_strncpy_read_str ( void *  data,
HOST_WIDE_INT  offset,
enum machine_mode  mode 
)
Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.   

References c_readstr(), HOST_WIDE_INT, and strlen().

Referenced by expand_builtin_strncpy(), simplify_builtin_call(), and store_expr().

int can_conditionally_move_p ( enum machine_mode  mode)
Return nonzero if the conditional move is supported.   

Referenced by expand_cond_expr_using_cmove(), expand_expr_real_2(), and simplify_set().

int can_store_by_pieces ( unsigned HOST_WIDE_INT  len,
rtx(*)(void *, HOST_WIDE_INT, enum machine_mode)  constfun,
void *  constfundata,
unsigned int  align,
bool  memsetp 
)
Return nonzero if it is desirable to store LEN bytes generated by
   CONSTFUN with several move instructions by store_by_pieces
   function.  CONSTFUNDATA is a pointer which will be passed as argument
   in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy
   of a const string.   
Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.   

References alignment_for_piecewise_move(), HOST_WIDE_INT, len, offset, optab_handler(), targetm, and widest_int_mode_for_size().

Referenced by expand_builtin_memcpy(), expand_builtin_mempcpy_args(), expand_builtin_memset_args(), expand_builtin_strncpy(), gimple_stringops_transform(), simplify_builtin_call(), and store_expr().
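
A sketch pairing this predicate with store_by_pieces in the way the
strncpy and memcpy built-in expanders do; the helper, the use of
builtin_strncpy_read_str as the callback, and the final argument of 0
(return the destination unchanged) are illustrative assumptions.

/* Illustrative only: copy LEN bytes of the constant string STR into the
   BLKmode MEM rtx TO piecewise, when the target can do so cheaply.  */
static rtx
store_constant_string_example (rtx to, const char *str,
                               unsigned HOST_WIDE_INT len,
                               unsigned int align)
{
  if (!can_store_by_pieces (len, builtin_strncpy_read_str,
                            CONST_CAST (char *, str), align,
                            false /* memsetp */))
    return NULL_RTX;
  return store_by_pieces (to, len, builtin_strncpy_read_str,
                          CONST_CAST (char *, str), align,
                          false /* memsetp */, 0 /* endp */);
}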

rtx change_address ( rtx  ,
enum  machine_mode,
rtx   
)
Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR.
   (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)   
unsigned HOST_WIDE_INT choose_multiplier ( unsigned HOST_WIDE_INT  d,
int  n,
int  precision,
unsigned HOST_WIDE_INT multiplier_ptr,
int *  post_shift_ptr,
int *  lgup_ptr 
)
Choose a minimal N + 1 bit approximation to 1/D that can be used to
   replace division by D, and put the least significant N bits of the result
   in *MULTIPLIER_PTR and return the most significant bit.   

   The width of operations is N (should be <= HOST_BITS_PER_WIDE_INT), the
   needed precision is in PRECISION (should be <= N).

   PRECISION should be as small as possible so this function can choose
   multiplier more freely.

   The rounded-up logarithm of D is placed in *lgup_ptr.  A shift count that
   is to be used for a final right shift is placed in *POST_SHIFT_PTR.

   Using this function, x/D will be equal to (x * m) >> (*POST_SHIFT_PTR),
   where m is the full HOST_BITS_PER_WIDE_INT + 1 bit multiplier.   

References ceil_log2(), double_int::div(), double_int::from_uhwi(), double_int::high, HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT, double_int::low, pow(), double_int::set_bit(), and double_int::ult().

Referenced by expand_divmod(), expand_vector_divmod(), and vect_recog_divmod_pattern().
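
A standalone worked example in plain C (not GCC-internal code) of the
identity this function enables, using the well-known constants for D = 3
with a 32-bit precision: the multiplier ceil(2^33 / 3) = 0xAAAAAAAB and a
combined right shift of 33.  The exact values choose_multiplier returns
are not asserted here.

/* Check that x / 3 == (x * 0xAAAAAAAB) >> 33 for unsigned 32-bit x.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const uint64_t m = 0xAAAAAAABu;   /* ceil (2^33 / 3) */
  uint64_t x;

  for (x = 0; x < (1u << 20); x++)
    assert ((x * m) >> 33 == x / 3);
  assert (((uint64_t) UINT32_MAX * m) >> 33 == UINT32_MAX / 3);
  return 0;
}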

void clear_pending_stack_adjust ( void  )
When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.   

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.   

References function::calls_alloca, cfun, and discard_pending_stack_adjust().

Referenced by expand_function_end(), expand_naked_return(), and expand_null_return_1().

rtx clear_storage ( rtx  ,
rtx  ,
enum  block_op_methods 
)
Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.   
rtx clear_storage_hints ( rtx  object,
rtx  size,
enum block_op_methods  method,
unsigned int  expected_align,
HOST_WIDE_INT  expected_size 
)
Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.   

References BLOCK_OP_NORMAL, BLOCK_OP_TAILCALL, clear_by_pieces(), emit_move_insn(), HOST_WIDE_INT, set_storage_via_libcall(), set_storage_via_setmem(), and write_complex_part().

Referenced by clear_storage(), and expand_builtin_memset_args().
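
A minimal sketch assuming OBJECT is a BLKmode MEM rtx; the byte count is
wrapped in a CONST_INT with GEN_INT and the helper name is hypothetical.

/* Illustrative only: zero NBYTES bytes of the BLKmode object.  */
static void
zero_block_example (rtx object, HOST_WIDE_INT nbytes)
{
  clear_storage (object, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}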

rtx convert_modes ( enum  machine_mode,
enum  machine_mode,
rtx  ,
int   
)
Convert an rtx to MODE from OLDMODE and return the result.   
void convert_move ( rtx  ,
rtx  ,
int   
)
Emit some rtl insns to move data between rtx's, converting machine modes.
   Both modes must be floating or both fixed.   
rtx convert_to_mode ( enum  machine_mode,
rtx  ,
int   
)
Convert an rtx to specified machine mode and return the result.   
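
A one-line sketch, assuming OP currently has a narrower integer mode:
widening to DImode with UNSIGNEDP nonzero requests zero extension.

/* Illustrative only: zero-extend OP to DImode.  */
static rtx
widen_to_dimode_example (rtx op)
{
  return convert_to_mode (DImode, op, 1 /* unsignedp */);
}
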
rtx copy_addr_to_reg ( rtx  )
Like copy_to_reg but always make the reg Pmode.   
void copy_blkmode_from_reg ( rtx  ,
rtx  ,
tree   
)
Copy BLKmode object from a set of registers.   
rtx copy_blkmode_to_reg ( enum  machine_mode,
tree   
)
rtx copy_to_mode_reg ( enum  machine_mode,
rtx   
)
Like copy_to_reg but always make the reg the specified mode MODE.   
rtx copy_to_reg ( rtx  )
Copy given rtx to a new temp reg and return that.   
rtx copy_to_suggested_reg ( rtx  ,
rtx  ,
enum  machine_mode 
)
Copy given rtx to given temp reg and return that.   
rtx default_expand_builtin ( tree  exp,
rtx  target,
rtx  subtarget,
enum machine_mode  mode,
int  ignore 
)
Default target-specific builtin expander that does nothing.   
void discard_pending_stack_adjust ( void  )
Discard any pending stack adjustment.   
This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.   

Referenced by clear_pending_stack_adjust(), and emit_stack_restore().

void do_jump ( tree  ,
rtx  ,
rtx  ,
int   
)
Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.   
void do_jump_1 ( enum tree_code  code,
tree  op0,
tree  op1,
rtx  if_false_label,
rtx  if_true_label,
int  prob 
)
Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1 .  IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
   PROB is probability of jump to if_true_label, or -1 if unknown.   

References can_compare_p(), ccp_jump, do_compare_and_jump(), do_jump(), do_jump_by_parts_equality(), do_jump_by_parts_greater(), do_pending_stack_adjust(), emit_label(), gen_label_rtx(), integer_zerop(), and inv().

Referenced by do_jump(), jumpif_1(), and jumpifnot_1().

rtx eliminate_constant_term ( rtx  ,
rtx  
)
Return an rtx like arg but sans any constant terms.
   Returns the original rtx if it has no constant terms.
   The constant terms are added and stored via a second arg.   
rtx emit_block_move ( rtx  ,
rtx  ,
rtx  ,
enum  block_op_methods 
)
rtx emit_block_move_hints ( rtx  x,
rtx  y,
rtx  size,
enum block_op_methods  method,
unsigned int  expected_align,
HOST_WIDE_INT  expected_size 
)
Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.   

References block_move_libcall_safe_for_call_parm(), BLOCK_OP_CALL_PARM, BLOCK_OP_NO_LIBCALL, BLOCK_OP_NORMAL, BLOCK_OP_TAILCALL, emit_block_move_via_libcall(), emit_block_move_via_loop(), emit_block_move_via_movmem(), mark_addressable(), move_by_pieces(), and set_mem_size().

Referenced by emit_block_move(), and expand_builtin_memcpy().
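
A sketch assuming X and Y are BLKmode MEM rtxes as required above;
emit_block_move is the entry point without alignment or size hints, and
the constant byte count here is an illustrative choice.

/* Illustrative only: copy NBYTES bytes from the BLKmode MEM Y to the
   BLKmode MEM X using whatever mechanism the target prefers.  */
static void
copy_block_example (rtx x, rtx y, HOST_WIDE_INT nbytes)
{
  rtx retval = emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);
  /* RETVAL is the memcpy return value if a libcall was emitted,
     otherwise 0; most callers ignore it.  */
  (void) retval;
}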

rtx emit_block_move_via_libcall ( rtx  ,
rtx  ,
rtx  ,
bool   
)
void emit_cmp_and_jump_insns ( rtx  x,
rtx  y,
enum rtx_code  comparison,
rtx  size,
enum machine_mode  mode,
int  unsignedp,
rtx  label,
int  prob 
)
Emit a pair of rtl insns to compare two rtx's and to jump
   to a label if the comparison is true.   
Generate code to compare X with Y so that the condition codes are
   set and to jump to LABEL if the condition is true.  If X is a
   constant and Y is not a constant, then the comparison is swapped to
   ensure that the comparison RTL has the canonical form.

   UNSIGNEDP nonzero says that X and Y are unsigned; this matters if they
   need to be widened.  UNSIGNEDP is also used to select the proper
   branch condition code.

   If X and Y have mode BLKmode, then SIZE specifies the size of both X and Y.

   MODE is the mode of the inputs (in case they are const_int).

   COMPARISON is the rtl operator to compare with (EQ, NE, GT, etc.).
   It will be potentially converted into an unsigned variant based on
   UNSIGNEDP to select a proper jump instruction.
   
   PROB is the probability of jumping to LABEL.   

References can_compare_p(), ccp_jump, emit_cmp_and_jump_insn_1(), force_reg(), OPTAB_LIB_WIDEN, prepare_cmp_insn(), swap_commutative_operands_p(), swap_condition(), and unsigned_condition().

Referenced by allocate_dynamic_stack_space(), anti_adjust_stack_and_probe(), asan_clear_shadow(), do_compare_rtx_and_jump(), do_tablejump(), emit_block_move_via_loop(), emit_case_nodes(), expand_compare_and_swap_loop(), expand_copysign_absneg(), expand_doubleword_clz(), expand_ffs(), expand_fix(), expand_float(), probe_stack_range(), sjlj_emit_function_enter(), stack_protect_epilogue(), store_expr(), and try_casesi().
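
A sketch of the common pattern, assuming SImode operands; the label
handling follows the usual gen_label_rtx/emit_label pairing seen in the
callers listed above, and the helper name is hypothetical.

/* Illustrative only: branch to a fresh label when OP0 >= OP1 as an
   unsigned SImode comparison; -1 says the branch probability is
   unknown.  */
static rtx
emit_unsigned_guard_example (rtx op0, rtx op1)
{
  rtx label = gen_label_rtx ();
  do_pending_stack_adjust ();
  emit_cmp_and_jump_insns (op0, op1, GEU, NULL_RTX, SImode,
                           1 /* unsignedp */, label, -1);
  /* ... insns for the fall-through (comparison false) path ...  */
  emit_label (label);
  return label;
}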

rtx emit_conditional_add ( rtx  target,
enum rtx_code  code,
rtx  op0,
rtx  op1,
enum machine_mode  cmode,
rtx  op2,
rtx  op3,
enum machine_mode  mode,
int  unsignedp 
)
Emit a conditional addition instruction if the machine supports one for that
   condition and machine mode.

   OP0 and OP1 are the operands that should be compared using CODE.  CMODE is
   the mode to use should they be constants.  If it is VOIDmode, they cannot
   both be constants.

   OP2 should be stored in TARGET if the comparison is false, otherwise OP2+OP3
   should be stored there.  MODE is the mode to use should they be constants.
   If it is VOIDmode, they cannot both be constants.

   The result is either TARGET (perhaps modified) or NULL_RTX if the operation
   is not supported.   

References convert_move(), create_fixed_operand(), create_input_operand(), create_output_operand(), delete_insns_since(), do_pending_stack_adjust(), gen_reg_rtx(), get_last_insn(), last, maybe_expand_insn(), optab_handler(), OPTAB_WIDEN, prepare_cmp_insn(), simplify_gen_relational(), swap_commutative_operands_p(), swap_condition(), unsigned_condition(), and expand_operand::value.

Referenced by noce_try_addcc().

rtx emit_conditional_move ( rtx  target,
enum rtx_code  code,
rtx  op0,
rtx  op1,
enum machine_mode  cmode,
rtx  op2,
rtx  op3,
enum machine_mode  mode,
int  unsignedp 
)
Emit a conditional move operation.   
Emit a conditional move instruction if the machine supports one for that
   condition and machine mode.

   OP0 and OP1 are the operands that should be compared using CODE.  CMODE is
   the mode to use should they be constants.  If it is VOIDmode, they cannot
   both be constants.

   OP2 should be stored in TARGET if the comparison is true, otherwise OP3
   should be stored there.  MODE is the mode to use should they be constants.
   If it is VOIDmode, they cannot both be constants.

   The result is either TARGET (perhaps modified) or NULL_RTX if the operation
   is not supported.   

References convert_move(), create_fixed_operand(), create_input_operand(), create_output_operand(), delete_insns_since(), direct_optab_handler(), do_pending_stack_adjust(), gen_reg_rtx(), get_last_insn(), last, maybe_expand_insn(), OPTAB_WIDEN, prepare_cmp_insn(), reversed_comparison_code_parts(), simplify_gen_relational(), swap_commutative_operands_p(), swap_condition(), unsigned_condition(), and expand_operand::value.

Referenced by emit_store_flag(), expand_cond_expr_using_cmove(), expand_doubleword_shift_condmove(), expand_expr_real_2(), expand_sdiv_pow2(), and noce_emit_cmove().
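
A hedged sketch for signed SImode operands, guarded by
can_conditionally_move_p since the expansion can fail; the helper and the
LT comparison are illustrative assumptions.

/* Illustrative only: TARGET = (OP0 < OP1) ? A : B, or NULL_RTX if the
   target has no usable conditional move.  */
static rtx
expand_select_example (rtx op0, rtx op1, rtx a, rtx b)
{
  rtx target;

  if (!can_conditionally_move_p (SImode))
    return NULL_RTX;
  target = gen_reg_rtx (SImode);
  return emit_conditional_move (target, LT, op0, op1, SImode,
                                a, b, SImode, 0 /* unsignedp */);
}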

void emit_group_load ( rtx  ,
rtx  ,
tree  ,
int   
)
Load a BLKmode value into non-consecutive registers represented by a
   PARALLEL.   
rtx emit_group_load_into_temps ( rtx  ,
rtx  ,
tree  ,
int   
)
Similarly, but load into new temporaries.   
void emit_group_move ( rtx  ,
rtx   
)
Move a non-consecutive group of registers represented by a PARALLEL into
   a non-consecutive group of registers represented by a PARALLEL.   
rtx emit_group_move_into_temps ( rtx  )
Move a group of registers represented by a PARALLEL into pseudos.   
void emit_group_store ( rtx  ,
rtx  ,
tree  ,
int   
)
Store a BLKmode value from non-consecutive registers represented by a
   PARALLEL.   
void emit_indirect_jump ( rtx  )
Generate code to indirectly jump to a location given in the rtx LOC.   

Referenced by expand_builtin_longjmp(), expand_builtin_nonlocal_goto(), expand_computed_goto(), and fix_crossing_unconditional_branches().

void emit_libcall_block ( rtx  ,
rtx  ,
rtx  ,
rtx   
)
Emit code to make a call to a constant function or a library call.   

Referenced by convert_move(), expand_fix(), expand_fixed_convert(), expand_float(), expand_twoval_binop_libfunc(), and prepare_float_lib_cmp().

rtx emit_move_complex_parts ( rtx  ,
rtx   
)
rtx emit_move_complex_push ( enum  machine_mode,
rtx  ,
rtx   
)
rtx emit_move_insn ( rtx  ,
rtx   
)
Emit insns to set X from Y.   
rtx emit_move_insn_1 ( rtx  ,
rtx   
)
Emit insns to set X from Y, with no frills.   
void emit_push_insn ( rtx  x,
enum machine_mode  mode,
tree  type,
rtx  size,
unsigned int  align,
int  partial,
rtx  reg,
int  extra,
rtx  args_addr,
rtx  args_so_far,
int  reg_parm_stack_space,
rtx  alignment_pad 
)
Generate code to push something onto the stack, given its mode and type.   
Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all the other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.   

References anti_adjust_stack(), assign_temp(), BLOCK_OP_CALL_PARM, copy_to_reg(), downward, emit_block_move(), emit_group_load(), emit_move_insn(), emit_push_insn(), emit_single_push_insn(), expand_binop(), force_const_mem(), gen_rtx_MEM(), HOST_WIDE_INT, move_block_to_reg(), move_by_pieces(), none, offset, operand_subword_force(), OPTAB_LIB_WIDEN, plus_constant(), push_block(), reg_mentioned_p(), set_mem_align(), targetm, upward, validize_mem(), and word_mode.

Referenced by emit_library_call_value_1(), emit_push_insn(), and store_one_arg().

void emit_stack_probe ( rtx  )
Emit one stack probe at ADDRESS, an address within the stack.   
void emit_stack_restore ( enum  save_level,
rtx   
)
Restore the stack pointer from a save area of the specified level.   
void emit_stack_save ( enum  save_level,
rtx  
)
Save the stack pointer at the specified level.   
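
A sketch of the usual save/restore pairing around a temporary dynamic
allocation; the SAVE_BLOCK level, the BITS_PER_UNIT required alignment
and the helper name are illustrative assumptions.

/* Illustrative only: bracket a temporary dynamic stack allocation with
   a block-level save and restore of the stack pointer.  */
static void
with_temporary_stack_block (rtx size)
{
  rtx save_area = NULL_RTX;
  rtx mem_addr;

  emit_stack_save (SAVE_BLOCK, &save_area);
  mem_addr = allocate_dynamic_stack_space (size, 0, BITS_PER_UNIT,
                                           false);
  /* ... use the memory at MEM_ADDR here ...  */
  (void) mem_addr;
  emit_stack_restore (SAVE_BLOCK, save_area);
}
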
rtx emit_store_flag ( rtx  target,
enum rtx_code  code,
rtx  op0,
rtx  op1,
enum machine_mode  mode,
int  unsignedp,
int  normalizep 
)
Emit a store-flag operation.   
Emit a store-flags instruction for comparison CODE on OP0 and OP1
   and storing in TARGET.  Normally return TARGET.
   Return 0 if that cannot be done.

   MODE is the mode to use for OP0 and OP1 should they be CONST_INTs.  If
   it is VOIDmode, they cannot both be CONST_INT.

   UNSIGNEDP is for the case where we have to widen the operands
   to perform the operation.  It says to use zero-extension.

   NORMALIZEP is 1 if we should convert the result to be either zero
   or one.  NORMALIZEP is -1 if we should convert the result to be
   either zero or -1.  If NORMALIZEP is zero, the result will be left
   "raw" out of the scc insn.   

References can_compare_p(), ccp_store_flag, convert_modes(), convert_move(), delete_insns_since(), emit_conditional_move(), emit_move_insn(), emit_store_flag(), emit_store_flag_1(), expand_binop(), expand_shift(), expand_unop(), get_last_insn(), last, optab_handler(), OPTAB_WIDEN, optimize_insn_for_speed_p(), reverse_condition(), reverse_condition_maybe_unordered(), rtx_cost(), rtx_equal_p(), split_comparison(), val_signbit_p(), and word_mode.

Referenced by convert_move(), emit_store_flag(), emit_store_flag_1(), emit_store_flag_force(), expand_divmod(), expand_sdiv_pow2(), expand_smod_pow2(), noce_emit_store_flag(), and noce_try_sign_mask().
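
A minimal sketch, assuming SImode operands and a fresh pseudo as the
suggested target; the helper name is hypothetical.

/* Illustrative only: materialize (OP0 == OP1) as 0/1 in a new SImode
   pseudo.  A null return means no usable store-flag sequence exists and
   the caller must branch instead.  */
static rtx
store_eq_flag_example (rtx op0, rtx op1)
{
  rtx target = gen_reg_rtx (SImode);
  return emit_store_flag (target, EQ, op0, op1, SImode,
                          0 /* unsignedp */, 1 /* normalizep */);
}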

rtx emit_store_flag_force ( rtx  target,
enum rtx_code  code,
rtx  op0,
rtx  op1,
enum machine_mode  mode,
int  unsignedp,
int  normalizep 
)
bool emit_storent_insn ( rtx  to,
rtx  from 
)
rtx expand_and ( enum  machine_mode,
rtx  ,
rtx  ,
rtx   
)
Expand a logical AND operation.   
void expand_assignment ( tree  ,
tree  ,
bool   
)
Expand an assignment that stores the value of FROM into TO.   
rtx expand_atomic_clear ( rtx  ,
enum  memmodel 
)
rtx expand_atomic_exchange ( rtx  ,
rtx  ,
rtx  ,
enum  memmodel 
)
rtx expand_atomic_fetch_op ( rtx  target,
rtx  mem,
rtx  val,
enum rtx_code  code,
enum memmodel  model,
bool  after 
)
This function expands an atomic fetch_OP or OP_fetch operation:
   TARGET is an optional place to stick the return value.  const0_rtx indicates
   the result is unused. 
   atomically fetch MEM, perform the operation with VAL and return it to MEM.
   CODE is the operation being performed (OP)
   MEMMODEL is the memory model variant to use.
   AFTER is true to return the result of the operation (OP_fetch).
   AFTER is false to return the value before the operation (fetch_OP).   

References can_compare_and_swap_p(), emit_insn(), emit_library_call_value(), emit_move_insn(), end_sequence(), expand_atomic_fetch_op_no_fallback(), expand_compare_and_swap_loop(), expand_simple_binop(), expand_simple_unop(), atomic_op_functions::fetch_after, atomic_op_functions::fetch_before, gen_reg_rtx(), get_atomic_op_for_code(), get_insns(), LCT_NORMAL, OPTAB_LIB_WIDEN, optab_libfunc(), ptr_mode, register_operand(), atomic_op_functions::reverse_code, and start_sequence().

Referenced by expand_builtin_atomic_fetch_op(), and expand_builtin_sync_operation().
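
A sketch assuming MEM is a MEM rtx for an SImode location and VAL is the
addend; the sequentially consistent memory model is an illustrative
choice.

/* Illustrative only: emit an __atomic_fetch_add-style sequence,
   returning the value seen before the addition (AFTER == false).  */
static rtx
atomic_fetch_add_example (rtx mem, rtx val)
{
  return expand_atomic_fetch_op (NULL_RTX /* target */, mem, val,
                                 PLUS, MEMMODEL_SEQ_CST,
                                 false /* after */);
}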

rtx expand_atomic_load ( rtx  ,
rtx  ,
enum  memmodel 
)
void expand_atomic_signal_fence ( enum  memmodel)
rtx expand_atomic_store ( rtx  ,
rtx  ,
enum  memmodel,
bool   
)
rtx expand_atomic_test_and_set ( rtx  ,
rtx  ,
enum  memmodel 
)
void expand_atomic_thread_fence ( enum  memmodel)
rtx expand_builtin ( tree  exp,
rtx  target,
rtx  subtarget,
enum machine_mode  mode,
int  ignore 
)
Functions from builtins.c:   
Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.   

References build_call_vec(), BUILT_IN_MD, called_as_built_in(), current_function_decl, error(), expand_builtin___clear_cache(), expand_builtin_adjust_trampoline(), expand_builtin_alloca(), expand_builtin_apply(), expand_builtin_apply_args(), expand_builtin_assume_aligned(), expand_builtin_atomic_always_lock_free(), expand_builtin_atomic_clear(), expand_builtin_atomic_compare_exchange(), expand_builtin_atomic_exchange(), expand_builtin_atomic_fetch_op(), expand_builtin_atomic_is_lock_free(), expand_builtin_atomic_load(), expand_builtin_atomic_signal_fence(), expand_builtin_atomic_store(), expand_builtin_atomic_test_and_set(), expand_builtin_atomic_thread_fence(), expand_builtin_bswap(), expand_builtin_bzero(), expand_builtin_cexpi(), expand_builtin_classify_type(), expand_builtin_compare_and_swap(), expand_builtin_copysign(), expand_builtin_dwarf_sp_column(), expand_builtin_eh_copy_values(), expand_builtin_eh_filter(), expand_builtin_eh_pointer(), expand_builtin_eh_return(), expand_builtin_eh_return_data_regno(), expand_builtin_expect(), expand_builtin_extend_pointer(), expand_builtin_extract_return_addr(), expand_builtin_fabs(), expand_builtin_fork_or_exec(), expand_builtin_frame_address(), expand_builtin_frob_return_addr(), expand_builtin_init_dwarf_reg_sizes(), expand_builtin_init_trampoline(), expand_builtin_int_roundingfn(), expand_builtin_int_roundingfn_2(), expand_builtin_interclass_mathfn(), expand_builtin_longjmp(), expand_builtin_mathfn(), expand_builtin_mathfn_2(), expand_builtin_mathfn_3(), expand_builtin_mathfn_ternary(), expand_builtin_memcmp(), expand_builtin_memcpy(), expand_builtin_memory_chk(), expand_builtin_mempcpy(), expand_builtin_memset(), expand_builtin_next_arg(), expand_builtin_nonlocal_goto(), expand_builtin_object_size(), expand_builtin_powi(), expand_builtin_prefetch(), expand_builtin_return(), expand_builtin_saveregs(), expand_builtin_set_thread_pointer(), expand_builtin_setjmp_receiver(), expand_builtin_setjmp_setup(), expand_builtin_signbit(), expand_builtin_sincos(), expand_builtin_stpcpy(), expand_builtin_strcmp(), expand_builtin_strcpy(), expand_builtin_strlen(), expand_builtin_strncmp(), expand_builtin_strncpy(), expand_builtin_sync_lock_release(), expand_builtin_sync_lock_test_and_set(), expand_builtin_sync_operation(), expand_builtin_sync_synchronize(), expand_builtin_thread_pointer(), expand_builtin_trap(), expand_builtin_unop(), expand_builtin_unreachable(), expand_builtin_unwind_init(), expand_builtin_update_setjmp_buf(), expand_builtin_va_copy(), expand_builtin_va_end(), expand_builtin_va_start(), expand_call(), expand_expr(), EXPAND_NORMAL, expand_normal(), expand_stack_restore(), expand_stack_save(), flags_from_decl_or_type(), fold_builtin_next_arg(), gen_reg_rtx(), get_builtin_sync_mode(), get_callee_fndecl(), label_rtx(), maybe_emit_chk_warning(), maybe_emit_free_warning(), maybe_emit_sprintf_chk_warning(), register_operand(), remove_node_from_expr_list(), targetm, validate_arglist(), expand_operand::value, and vec_alloc().

Referenced by expand_expr_real_1(), and expand_expr_real_2().

rtx expand_builtin_saveregs ( void  )
Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.   

References emit_insn_after(), end_sequence(), entry_of_function(), get_insns(), pop_topmost_sequence(), push_topmost_sequence(), start_sequence(), and targetm.

Referenced by expand_builtin().

void expand_builtin_setjmp_receiver ( rtx  )
void expand_builtin_setjmp_setup ( rtx  ,
rtx   
)
rtx expand_call ( tree  ,
rtx  ,
int   
)
void expand_case ( gimple  )
In stmt.c  
Expand a GIMPLE_SWITCH statement.   

Referenced by expand_gimple_stmt_1().

rtx expand_divmod ( int  rem_flag,
enum tree_code  code,
enum machine_mode  mode,
rtx  op0,
rtx  op1,
rtx  target,
int  unsignedp 
)
Emit the code to divide OP0 by OP1, putting the result in TARGET
   if that is convenient, and returning where the result is.
   You may request either the quotient or the remainder as the result;
   specify REM_FLAG nonzero to get the remainder.

   CODE is the expression code for which kind of division this is;
   it controls how rounding is done.  MODE is the machine mode to use.
   UNSIGNEDP nonzero means do unsigned division.   
??? For CEIL_MOD_EXPR, can compute incorrect remainder with ANDI
   and then correct it by or'ing in missing high bits
   if result of ANDI is nonzero.
   For ROUND_MOD_EXPR, can use ANDI and then sign-extend the result.
   This could optimize to a bfexts instruction.
   But C doesn't use these operations, so their optimizations are
   left for later.   
??? For modulo, we don't actually need the highpart of the first product,
   the low part will do nicely.  And for small divisors, the second multiply
   can also be a low-part only multiply or even be completely left out.
   E.g. to calculate the remainder of a division by 3 with a 32 bit
   multiply, multiply with 0x55555556 and extract the upper two bits;
   the result is exact for inputs up to 0x1fffffff.
   The input range can be reduced by using cross-sum rules.
   For odd divisors >= 3, the following table gives right shift counts
   so that if a number is shifted by an integer multiple of the given
   amount, the remainder stays the same:
   2, 4, 3, 6, 10, 12, 4, 8, 18, 6, 11, 20, 18, 0, 5, 10, 12, 0, 12, 20,
   14, 12, 23, 21, 8, 0, 20, 18, 0, 0, 6, 12, 0, 22, 0, 18, 20, 30, 0, 0,
   0, 8, 0, 11, 12, 10, 36, 0, 30, 0, 0, 12, 0, 0, 0, 0, 44, 12, 24, 0,
   20, 0, 7, 14, 0, 18, 36, 0, 0, 46, 60, 0, 42, 0, 15, 24, 20, 0, 0, 33,
   0, 20, 0, 0, 18, 0, 60, 0, 0, 0, 0, 0, 40, 18, 0, 0, 12

   Cross-sum rules for even numbers can be derived by leaving as many bits
   to the right alone as the divisor has zeros to the right.
   E.g. if x is an unsigned 32 bit number:
   (x mod 12) == (((x & 1023) + ((x >> 8) & ~3)) * 0x15555558 >> 2 * 3) >> 28

References add_cost(), choose_multiplier(), convert_modes(), copy_to_mode_reg(), delete_insns_since(), do_cmp_and_jump(), emit_barrier(), emit_jump_insn(), emit_label(), emit_move_insn(), emit_store_flag(), emit_store_flag_force(), expand_abs(), expand_binop(), expand_dec(), expand_divmod(), expand_inc(), expand_mult(), expand_sdiv_pow2(), expand_shift(), expand_smod_pow2(), expand_twoval_binop(), expand_twoval_binop_libfunc(), expand_unop(), expmed_mult_highpart(), floor_log2(), force_operand(), force_reg(), gen_int_mode(), gen_label_rtx(), gen_reg_rtx(), get_last_insn(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT, invert_mod2n(), last, mul_cost(), OPTAB_DIRECT, optab_handler(), OPTAB_LIB_WIDEN, optab_libfunc(), OPTAB_WIDEN, optimize_insn_for_speed_p(), plus_constant(), reg_mentioned_p(), sdiv_cost(), sdiv_pow2_cheap(), set_dst_reg_note(), shift_cost(), sign_expand_binop(), smod_pow2_cheap(), and udiv_cost().

Referenced by allocate_dynamic_stack_space(), expand_divmod(), expand_expr_real_2(), force_operand(), and round_push().
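
A minimal sketch of requesting a signed truncating SImode quotient;
REM_FLAG, the tree code and the mode are the illustrative choices here.

/* Illustrative only: signed C-style division, letting the expander pick
   the cheapest sequence (shift, multiply-highpart, divide instruction
   or libcall).  */
static rtx
expand_signed_div_example (rtx op0, rtx op1)
{
  return expand_divmod (0 /* rem_flag: want the quotient */,
                        TRUNC_DIV_EXPR, SImode, op0, op1,
                        NULL_RTX /* target */, 0 /* unsignedp */);
}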

static rtx expand_expr ( tree  exp,
rtx  target,
enum machine_mode  mode,
enum expand_modifier  modifier 
)
[inline, static]
Generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.   

References expand_expr_real().

Referenced by addr_for_mem_ref(), assign_parms(), computation_cost(), dbxout_expand_expr(), do_store_flag(), emit_case_nodes(), expand_asm_operands(), expand_assignment(), expand_builtin(), expand_builtin___clear_cache(), expand_builtin_assume_aligned(), expand_builtin_bswap(), expand_builtin_cexpi(), expand_builtin_copysign(), expand_builtin_eh_return(), expand_builtin_expect(), expand_builtin_extend_pointer(), expand_builtin_extract_return_addr(), expand_builtin_fabs(), expand_builtin_frob_return_addr(), expand_builtin_int_roundingfn(), expand_builtin_int_roundingfn_2(), expand_builtin_interclass_mathfn(), expand_builtin_mathfn(), expand_builtin_mathfn_2(), expand_builtin_mathfn_3(), expand_builtin_mathfn_ternary(), expand_builtin_memory_chk(), expand_builtin_mempcpy_args(), expand_builtin_memset_args(), expand_builtin_powi(), expand_builtin_prefetch(), expand_builtin_set_thread_pointer(), expand_builtin_signbit(), expand_builtin_stpcpy(), expand_builtin_strlen(), expand_builtin_unop(), expand_builtin_va_copy(), expand_builtin_va_end(), expand_call(), expand_debug_expr(), expand_expr_addr_expr_1(), expand_expr_force_mode(), expand_expr_real_1(), expand_expr_real_2(), expand_function_start(), expand_gimple_stmt_1(), expand_LOAD_LANES(), expand_operands(), expand_return(), expand_STORE_LANES(), expr_size(), get_builtin_sync_mem(), get_memory_rtx(), insert_value_copy_on_edge(), optimize_bitfield_assignment_op(), output_constant(), output_ttype(), read_complex_part(), reduce_to_bit_field_precision(), rtl_for_decl_init(), std_expand_builtin_va_start(), store_expr(), store_field(), store_one_arg(), update_nonlocal_goto_save_area(), and vector_compare_rtx().

rtx expand_expr_real ( tree  exp,
rtx  target,
enum machine_mode  tmode,
enum expand_modifier  modifier,
rtx alt_rtl 
)
Work horses for expand_expr.   
expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   Intermediate values must go elsewhere.   Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.   

References expand_expr_real_1().

Referenced by expand_expr(), expand_expr_real_1(), expand_normal(), and store_expr().
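
A hedged sketch of the "TARGET is just a suggestion" rule described
above, assuming TARGET is a word_mode register or MEM; store_expr is the
full-featured way to expand into a particular destination, so this is a
simplified illustration only.

/* Illustrative only: expand EXP, copying into TARGET only when the
   expander did not use it.  */
static rtx
expand_into_target_example (tree exp, rtx target)
{
  rtx val = expand_expr_real (exp, target, word_mode, EXPAND_NORMAL,
                              NULL /* alt_rtl */);
  if (val != target)
    emit_move_insn (target, val);
  return target;
}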

rtx expand_expr_real_1 ( tree  ,
rtx  ,
enum  machine_mode,
enum  expand_modifier,
rtx  
)

References add_to_hard_reg_set(), addr_for_mem_ref(), array_ref_low_bound(), assign_stack_temp(), assign_stack_temp_for_type(), assign_temp(), BLOCK_OP_CALL_PARM, BLOCK_OP_NORMAL, build_constructor(), build_int_cst(), build_qualified_type(), BUILT_IN_FRONTEND, separate_ops::code, compare_tree_int(), CONST_FIXED_FROM_FIXED_VALUE, const_vector_from_tree(), convert_modes(), convert_move(), convert_to_mode(), copy_rtx(), copy_to_reg(), count, create_fixed_operand(), create_output_operand(), ctor_for_folding(), curr_insn_location(), current_function_decl, currently_expanding_to_rtl, decl_function_context(), lang_hooks::decl_printable_name, do_jump(), do_pending_stack_adjust(), emit_block_move(), emit_insn(), emit_label(), emit_move_insn(), error(), exp(), expand_and(), expand_assignment(), expand_builtin(), expand_call(), EXPAND_CONST_ADDRESS, expand_constructor(), expand_expr(), expand_expr_addr_expr(), expand_expr_constant(), expand_expr_real(), expand_expr_real_1(), expand_expr_real_2(), EXPAND_INITIALIZER, expand_insn(), EXPAND_MEMORY, EXPAND_NORMAL, expand_normal(), expand_shift(), EXPAND_STACK_PARM, EXPAND_SUM, EXPAND_WRITE, extract_bit_field(), fold(), fold_convert_loc(), fold_read_from_constant_string(), fold_unary_loc(), force_const_mem(), force_reg(), g, gen_int_mode(), gen_label_rtx(), gen_lowpart_common(), gen_lowpart_SUBREG(), gen_raw_REG(), gen_reg_rtx(), gen_rtx_MEM(), get_address_description(), get_address_mode(), get_callee_fndecl(), get_def_for_expr(), get_gimple_for_ssa_name(), get_inner_reference(), get_object_alignment(), get_rtx_for_ssa_name(), get_subtarget(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_to_tree(), gimple_call_fntype(), gimple_call_internal_p(), gimple_location(), handled_component_p(), highest_pow2_factor(), HOST_BITS_PER_WIDE_INT, host_integerp(), HOST_WIDE_INT, identifier_to_locale(), immed_double_const(), immed_double_int_const(), int_size_in_bytes(), integer_onep(), integer_zerop(), is_aligning_offset(), label_rtx(), layout_decl(), separate_ops::location, lookup_attribute(), double_int::low, mark_reg_pointer(), mem_ref_offset(), mem_ref_refers_to_non_mem_p(), memory_address_addr_space(), memory_address_addr_space_p(), mode_for_size(), offset, offset_address(), separate_ops::op0, separate_ops::op1, separate_ops::op2, optab_handler(), promote_decl_mode(), promote_function_mode(), read_complex_part(), replace_equiv_address(), set_curr_insn_location(), set_mem_addr_space(), set_mem_align(), set_mem_attributes(), set_mem_size(), set_reg_attrs_for_decl_rtl(), simplify_gen_binary(), size_diffop_loc(), stmt_is_replaceable_p(), store_expr(), targetm, tcc_binary, tcc_comparison, tcc_unary, tree_int_cst_equal(), tree_nonartificial_location(), separate_ops::type, type(), lang_hooks_for_types::type_for_mode, TYPE_QUAL_CONST, lang_hooks::types, use_anchored_address(), validize_mem(), expand_operand::value, vec_alloc(), and warning_at().

Referenced by expand_call_stmt(), expand_expr_real(), and expand_expr_real_1().

rtx expand_expr_real_2 ( sepops  ,
rtx  ,
enum  machine_mode,
enum  expand_modifier 
)

References assign_temp(), build_call_expr(), can_conditionally_move_p(), separate_ops::code, convert_modes(), convert_move(), convert_to_mode(), copy_rtx(), copy_to_mode_reg(), do_compare_rtx_and_jump(), do_pending_stack_adjust(), do_store_flag(), emit_barrier(), emit_conditional_move(), emit_insn(), emit_jump_insn(), emit_label(), emit_move_insn(), end_sequence(), expand_abs(), expand_binop(), expand_builtin(), expand_cond_expr_using_cmove(), expand_divmod(), expand_expr(), expand_fix(), expand_fixed_convert(), expand_float(), EXPAND_INITIALIZER, expand_mult(), expand_mult_highpart(), expand_mult_highpart_adjust(), EXPAND_NORMAL, expand_normal(), expand_operands(), EXPAND_STACK_PARM, EXPAND_SUM, expand_ternary_op(), expand_unop(), expand_variable_shift(), expand_vec_cond_expr(), expand_vec_perm(), expand_vec_shift_expr(), expand_widen_pattern_expr(), expand_widening_mult(), fold_convert_loc(), force_operand(), force_reg(), gen_highpart(), gen_int_mode(), gen_label_rtx(), gen_reg_rtx(), get_def_for_expr(), get_gimple_rhs_class(), get_insns(), get_subtarget(), gimple_assign_rhs1(), GIMPLE_BINARY_RHS, GIMPLE_TERNARY_RHS, GIMPLE_UNARY_RHS, HOST_BITS_PER_WIDE_INT, host_integerp(), HOST_WIDE_INT, immed_double_const(), immed_double_int_const(), int_fits_type_p(), int_size_in_bytes(), jumpifnot(), jumpifnot_1(), separate_ops::location, double_int::mask(), mathfn_built_in(), mode_for_vector(), negate_rtx(), separate_ops::op0, separate_ops::op1, separate_ops::op2, optab_default, optab_for_tree_code(), optab_handler(), OPTAB_LIB_WIDEN, OPTAB_WIDEN, plus_constant(), ptr_mode, really_constant_p(), reg_overlap_mentioned_p(), safe_from_p(), set_mem_attributes(), simplify_gen_binary(), simplify_gen_subreg(), simplify_subreg(), start_sequence(), store_expr(), store_field(), subreg_lowpart_offset(), targetm, tree_low_cst(), separate_ops::type, type(), word_mode, and write_complex_part().

Referenced by expand_expr_real_1(), and expand_gimple_stmt_1().

rtx expand_mult ( enum machine_mode  mode,
rtx  op0,
rtx  op1,
rtx  target,
int  unsignedp 
)
Perform a multiplication and return an rtx for the result.
   MODE is mode of value; OP0 and OP1 are what to multiply (rtx's);
   TARGET is a suggestion for where to store the result (an rtx).

   We check specially for a constant integer as OP1.
   If you want this check for OP0 as well, then before calling
   you should swap the two operands if OP0 would be constant.   

References choose_mult_variant(), dconst2, expand_binop(), expand_mult_const(), expand_shift(), expand_unop(), floor_log2(), force_reg(), gen_raw_REG(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT, neg_cost(), OPTAB_LIB_WIDEN, optimize_insn_for_speed_p(), rtx_equal_p(), set_src_cost(), and shift.

Referenced by allocate_dynamic_stack_space(), builtin_memset_gen_str(), expand_divmod(), expand_expr_real_2(), force_operand(), and round_push().
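
A one-function sketch; the constant operand 10 is an illustrative choice
showing the special-casing of constant OP1 mentioned above.

/* Illustrative only: multiply OP0 by 10 in SImode; a constant OP1 may
   be turned into a shift-and-add sequence rather than a mul insn.  */
static rtx
mult_by_ten_example (rtx op0)
{
  return expand_mult (SImode, op0, GEN_INT (10), NULL_RTX,
                      0 /* unsignedp */);
}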

rtx expand_mult_highpart_adjust ( enum machine_mode  mode,
rtx  adj_operand,
rtx  op0,
rtx  op1,
rtx  target,
int  unsignedp 
)
Emit code to adjust ADJ_OPERAND after multiplication of wrong signedness
   flavor of OP0 and OP1.  ADJ_OPERAND is already the high half of the
   product OP0 x OP1.  If UNSIGNEDP is nonzero, adjust the signed product
   to become unsigned, if UNSIGNEDP is zero, adjust the unsigned product to
   become signed.

   The result is put in TARGET if that is convenient.

   MODE is the mode of operation.   

References expand_and(), expand_shift(), and force_operand().

Referenced by expand_expr_real_2(), and expmed_mult_highpart_optab().

static rtx expand_normal ( tree  exp )
[inline, static]

References expand_expr_real(), and EXPAND_NORMAL.

Referenced by asan_emit_stack_protection(), copy_blkmode_to_reg(), do_compare_and_jump(), do_jump(), do_jump_by_parts_equality(), do_jump_by_parts_greater(), emit_block_move_via_libcall(), emit_case_decision_tree(), emit_case_nodes(), expand_assignment(), expand_builtin(), expand_builtin_adjust_trampoline(), expand_builtin_alloca(), expand_builtin_atomic_compare_exchange(), expand_builtin_cexpi(), expand_builtin_copysign(), expand_builtin_init_dwarf_reg_sizes(), expand_builtin_init_trampoline(), expand_builtin_int_roundingfn(), expand_builtin_mathfn_2(), expand_builtin_mathfn_ternary(), expand_builtin_memcmp(), expand_builtin_memcpy(), expand_builtin_mempcpy_args(), expand_builtin_memset_args(), expand_builtin_nonlocal_goto(), expand_builtin_prefetch(), expand_builtin_signbit(), expand_builtin_sincos(), expand_builtin_stpcpy(), expand_builtin_strcmp(), expand_builtin_strncmp(), expand_call(), expand_computed_goto(), expand_cond_expr_using_cmove(), expand_expr_real_1(), expand_expr_real_2(), expand_LOAD_LANES(), expand_return(), expand_stack_restore(), expand_STORE_LANES(), expand_variable_shift(), expand_vec_cond_expr(), expand_vec_shift_expr(), fold_builtin_atomic_always_lock_free(), get_memmodel(), precompute_arguments(), precompute_register_parameters(), rtx_for_function_call(), set_storage_via_libcall(), stack_protect_epilogue(), stack_protect_prologue(), store_constructor(), store_field(), try_casesi(), and try_tablejump().

rtx expand_shift ( enum tree_code  code,
enum machine_mode  mode,
rtx  shifted,
int  amount,
rtx  target,
int  unsignedp 
)
rtx expand_simple_binop ( enum machine_mode  mode,
enum rtx_code  code,
rtx  op0,
rtx  op1,
rtx  target,
int  unsignedp,
enum optab_methods  methods 
)
Generate code for a simple binary or unary operation.  "Simple" in
   this case means "can be unambiguously described by a (mode, code)
   pair and mapped to a single optab."   
Wrapper around expand_binop which takes an rtx code to specify
   the operation to perform, not an optab pointer.  All other
   arguments are the same.   

References code_to_optab(), and expand_binop().

Referenced by asan_clear_shadow(), emit_block_move_via_loop(), emit_case_nodes(), emit_move_resolve_push(), expand_atomic_fetch_op(), expand_atomic_fetch_op_no_fallback(), expand_builtin_apply(), expand_builtin_atomic_fetch_op(), expand_expr_addr_expr_1(), expand_vec_perm(), force_operand(), generate_prolog_epilog(), instantiate_virtual_regs_in_insn(), noce_try_addcc(), noce_try_minmax(), noce_try_store_flag_constants(), noce_try_store_flag_mask(), round_trampoline_addr(), and unroll_loop_runtime_iterations().
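
A usage sketch (hypothetical helper, usual GCC-internal includes assumed): two
chained calls that compute X & (X - 1) in word_mode, with OPTAB_LIB_WIDEN
allowing a wider mode or a library call if the target lacks the direct insns.

/* Clear the lowest set bit of X, returning the result as a pseudo.  */
static rtx
clear_lowest_bit (rtx x)
{
  rtx x_minus_1 = expand_simple_binop (word_mode, MINUS, x, const1_rtx,
                                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  return expand_simple_binop (word_mode, AND, x, x_minus_1,
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);
}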

rtx expand_simple_unop ( enum machine_mode  mode,
enum rtx_code  code,
rtx  op0,
rtx  target,
int  unsignedp 
)
Wrapper around expand_unop which takes an rtx code to specify
   the operation to perform, not an optab pointer.  All other
   arguments are the same.   

References code_to_optab(), and expand_unop().

Referenced by expand_atomic_fetch_op(), expand_atomic_fetch_op_no_fallback(), expand_builtin_atomic_fetch_op(), force_operand(), and noce_try_abs().

void expand_sjlj_dispatch_table ( rtx  dispatch_index,
vec<tree>  dispatch_table 
)
Like expand_case but special-case for SJLJ exception dispatching.   
Expand the dispatch to a short decrement chain if there are few cases
   to dispatch to.  Likewise if neither casesi nor tablejump is available,
   or if flag_jump_tables is set.  Otherwise, expand as a casesi or a
   tablejump.  The index mode is always the mode of integer_type_node.
   Trap if no case matches the index.

   DISPATCH_INDEX is the index expression to switch on.  It should be a
   memory or register operand.
   
   DISPATCH_TABLE is a set of case labels.  The set should be sorted in
   ascending order, be contiguous, starting with value 0, and contain only
   single-valued case labels.   

References add_case_node(), build_int_cst(), copy_to_mode_reg(), create_alloc_pool(), do_jump_if_equal(), do_pending_stack_adjust(), emit_case_dispatch_table(), emit_label(), expand_builtin_trap(), force_expand_binop(), free_alloc_pool(), free_temp_slots(), gen_label_rtx(), get_last_insn(), label_rtx(), case_node::low, make_tree(), OPTAB_DIRECT, and reorder_insns().

Referenced by sjlj_emit_dispatch_table().

rtx expand_sync_fetch_operation ( rtx  ,
rtx  ,
enum  rtx_code,
bool  ,
rtx   
)
rtx expand_sync_lock_test_and_set ( rtx  ,
rtx  ,
rtx   
)
rtx expand_sync_operation ( rtx  ,
rtx  ,
enum  rtx_code 
)
rtx expand_variable_shift ( enum tree_code  code,
enum machine_mode  mode,
rtx  shifted,
tree  amount,
rtx  target,
int  unsignedp 
)
Output a shift instruction for expression code CODE,
   with SHIFTED being the rtx for the value to shift,
   and AMOUNT the tree for the amount to shift by.
   Store the result in the rtx TARGET, if that is convenient.
   If UNSIGNEDP is nonzero, do a logical shift; otherwise, arithmetic.
   Return the rtx for where the value is.   

References expand_normal(), and expand_shift_1().

Referenced by expand_expr_real_2().
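
A usage sketch (hypothetical helper, usual GCC-internal includes assumed):
AMOUNT stays a tree, so the expander can exploit a constant shift count when
there is one.

/* Logical right shift of VAL (word_mode) by the tree AMOUNT.  */
static rtx
emit_lshiftrt (rtx val, tree amount)
{
  /* RSHIFT_EXPR with UNSIGNEDP nonzero requests a logical shift.  */
  return expand_variable_shift (RSHIFT_EXPR, word_mode, val, amount,
                                NULL_RTX, 1);
}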

rtx expr_size ( tree  )
Functions from alias.c  
rtl.h and tree.h were included.   
Return an rtx for the size in bytes of the value of an expr.   
rtx extract_bit_field ( rtx  str_rtx,
unsigned HOST_WIDE_INT  bitsize,
unsigned HOST_WIDE_INT  bitnum,
int  unsignedp,
bool  packedp,
rtx  target,
enum machine_mode  mode,
enum machine_mode  tmode 
)
Generate code to extract a byte-field from STR_RTX
   containing BITSIZE bits, starting at BITNUM,
   and put it in TARGET if possible (if TARGET is nonzero).
   Regardless of TARGET, we return the rtx for where the value is placed.

   STR_RTX is the structure containing the byte (a REG or MEM).
   UNSIGNEDP is nonzero if this is an unsigned bit field.
   PACKEDP is nonzero if the field has the packed attribute.
   MODE is the natural mode of the field value once extracted.
   TMODE is the mode the caller would like the value to have;
   but the value may be returned with type MODE instead.

   If a TARGET is specified and we can store in it at no extra cost,
   we do so, and return TARGET.
   Otherwise, we return a REG of mode TMODE or MODE, with TMODE preferred
   if they are equally easy.   

References extract_bit_field_1().

Referenced by copy_blkmode_from_reg(), copy_blkmode_to_reg(), emit_group_load_1(), expand_expr_real_1(), read_complex_part(), store_field(), and store_unaligned_arguments_into_pseudos().
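
A usage sketch (hypothetical helper and field layout, usual GCC-internal
includes assumed): pull a small unsigned field out of a structure in memory.

/* Read the unsigned 3-bit field that starts at bit 5 of MEM, widening
   the result to an SImode value.  */
static rtx
read_small_field (rtx mem)
{
  return extract_bit_field (mem, 3 /* bitsize */, 5 /* bitnum */,
                            1 /* unsignedp */, false /* packedp */,
                            NULL_RTX, SImode, SImode);
}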

rtx extract_low_bits ( enum  machine_mode,
enum  machine_mode,
rtx   
)
void fixup_tail_calls ( void  )
A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.   

References find_reg_note(), get_insns(), and remove_note().

Referenced by expand_stack_alignment(), and gimple_expand_cfg().

rtx force_label_rtx ( tree  )
As label_rtx, but additionally the label is placed on the forced label
   list of its containing function (i.e. it is treated as reachable even
   if it is not obvious how it is reached).   

Referenced by decode_addr_const().

rtx force_not_mem ( rtx  )
Return given rtx, copied into a new temp reg if it was in memory.   
rtx force_operand ( rtx  ,
rtx   
)
Given an rtx that may include add and multiply operations,
   generate them as insns and return a pseudo-reg containing the value.
   Useful after calling expand_expr with 1 as sum_ok.   
rtx force_reg ( enum  machine_mode,
rtx   
)
Copy a value to a register if it isn't already a register.
   Args are mode (in case value is a constant) and the value.   
rtx gen_add2_insn ( rtx  ,
rtx   
)
Create but don't emit one rtl instruction to perform certain operations.
   Modes must match; operands must meet the operation's predicates.
   Likewise for subtraction and for just copying.   

Referenced by emit_add2_insn(), emit_inc(), gen_reload(), inc_for_reload(), move_by_pieces(), move_by_pieces_1(), store_by_pieces(), and store_by_pieces_2().
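
A usage sketch (hypothetical helper, usual GCC-internal includes assumed):
since gen_add2_insn only builds the insn, it still has to be passed to
emit_insn, and the operands must already satisfy the add pattern's predicates.

/* Increment REG by 4 in place.  */
static void
bump_by_four (rtx reg)
{
  emit_insn (gen_add2_insn (reg, GEN_INT (4)));
}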

rtx gen_add3_insn ( rtx  ,
rtx  ,
rtx   
)
rtx gen_cond_trap ( enum  rtx_code,
rtx  ,
rtx  ,
rtx   
)
Generate a conditional trap instruction.   

Referenced by find_cond_trap().

rtx gen_group_rtx ( rtx  )
Generate a non-consecutive group of registers represented by a PARALLEL.   
rtx gen_sub2_insn ( rtx  ,
rtx   
)

Referenced by emit_inc(), and inc_for_reload().

rtx gen_sub3_insn ( rtx  ,
rtx  ,
rtx   
)
int get_mem_align_offset ( rtx  ,
unsigned  int 
)
Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.   
rtx get_personality_function ( tree  )
Get the personality libfunc for a function decl.   
rtx hard_function_value ( const_tree  valtype,
const_tree  func,
const_tree  fntype,
int  outgoing 
)
Return an rtx that refers to the value returned by a function
   in its original home.  This becomes invalid if any more code is emitted.   
Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.   

References HOST_WIDE_INT, int_size_in_bytes(), and targetm.

Referenced by aggregate_value_p(), emit_library_call_value_1(), expand_call(), and expand_function_start().
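
A usage sketch (hypothetical helper, usual GCC-internal includes assumed):
from the caller's point of view (OUTGOING == 0), find where a call to FNDECL
leaves its scalar return value.

/* Return the rtx for FNDECL's scalar return-value location.  */
static rtx
callee_return_location (tree fndecl)
{
  tree fntype = TREE_TYPE (fndecl);
  tree valtype = TREE_TYPE (fntype);
  return hard_function_value (valtype, fndecl, fntype, 0 /* outgoing */);
}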

rtx hard_libcall_value ( enum  machine_mode,
rtx   
)
Return an rtx that refers to the value returned by a library call
   in its original home.  This becomes invalid if any more code is emitted.   
int have_add2_insn ( rtx  ,
rtx   
)
int have_insn_for ( enum  rtx_code,
enum  machine_mode 
)
int have_sub2_insn ( rtx  ,
rtx   
)
void init_all_optabs ( struct target_optabs )
void init_block_clear_fn ( const char *  )
void init_block_move_fn ( const char *  )
void init_expr ( void  )
void init_expr_target ( void  )
Functions from expr.c:   
This is run during target initialization to set up which modes can be
   used directly in memory and to initialize the block move optab.  It is
   run at the beginning of compilation and when the target is reinitialized.   

References can_extend_p(), gen_rtx_MEM(), gen_rtx_REG(), insn_operand_matches(), and recog().

Referenced by lang_dependent_init_target().

rtx init_one_libfunc ( const char *  )
Call this to initialize an optab function entry.   

Referenced by allocate_dynamic_stack_space(), expand_main_function(), init_optabs(), set_conv_libfunc(), and set_optab_libfunc().

void init_optabs ( void  )
Call this once to initialize the contents of the optabs
   appropriately for the current target machine.   

References eq_libfunc(), hash_libfunc(), init_all_optabs(), init_one_libfunc(), mode_for_size(), set_optab_libfunc(), and targetm.

Referenced by lang_dependent_init_target().

void init_pending_stack_adjust ( void  )
At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.   
HOST_WIDE_INT int_expr_size ( tree  )
Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.   
void jumpif ( tree  ,
rtx  ,
int   
)
Generate code to evaluate EXP and jump to LABEL if the value is nonzero.   
void jumpif_1 ( enum  tree_code,
tree  ,
tree  ,
rtx  ,
int   
)
void jumpifnot ( tree  ,
rtx  ,
int   
)
Generate code to evaluate EXP and jump to LABEL if the value is zero.   
void jumpifnot_1 ( enum  tree_code,
tree  ,
tree  ,
rtx  ,
int   
)
void locate_and_pad_parm ( enum machine_mode  passed_mode,
tree  type,
int  in_regs,
int  partial,
tree  fndecl,
struct args_size *  initial_offset_ptr,
struct locate_and_pad_arg_data *  locate 
)
Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.   
LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.   

References locate_and_pad_arg_data::alignment_pad, locate_and_pad_arg_data::boundary, args_size::constant, downward, host_integerp(), none, locate_and_pad_arg_data::offset, pad_below(), pad_to_arg_alignment(), locate_and_pad_arg_data::size, size_in_bytes(), locate_and_pad_arg_data::slot_offset, targetm, tree_low_cst(), args_size::var, and locate_and_pad_arg_data::where_pad.

Referenced by assign_parm_find_entry_rtl(), emit_library_call_value_1(), and initialize_argument_information().
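
A usage sketch (hypothetical helper, usual GCC-internal includes assumed):
compute the slot for a single SImode argument starting from an empty argument
block; real callers thread INITIAL_OFFSET_PTR through every argument in turn.

/* Locate the stack slot an SImode argument of TYPE would get.  */
static void
locate_one_arg (tree type, tree fndecl)
{
  struct args_size offset;
  struct locate_and_pad_arg_data locate;

  memset (&offset, 0, sizeof (offset));
  memset (&locate, 0, sizeof (locate));
  locate_and_pad_parm (SImode, type, 0 /* in_regs */, 0 /* partial */,
                       fndecl, &offset, &locate);
  /* locate.offset and locate.size now describe the slot relative to
     the initial offset passed in.  */
}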

rtx maybe_emit_group_store ( rtx  ,
tree   
)
rtx memory_address_addr_space ( enum  machine_mode,
rtx  ,
addr_space_t   
)
Convert arg to a valid memory address for specified machine mode that points
   to a specific named address space, by emitting insns to perform arithmetic
   if necessary.   
void move_block_from_reg ( int  ,
rtx  ,
int   
)
Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.   
void move_block_to_reg ( int  ,
rtx  ,
int  ,
enum  machine_mode 
)
Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.   
unsigned HOST_WIDE_INT move_by_pieces_ninsns ( unsigned HOST_WIDE_INT  l,
unsigned int  align,
unsigned int  max_size 
)
Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.   

References alignment_for_piecewise_move(), HOST_WIDE_INT, optab_handler(), and widest_int_mode_for_size().

Referenced by move_by_pieces(), and store_by_pieces_1().
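
A usage sketch (hypothetical heuristic; the MOVE_MAX_PIECES + 1 bound follows
the in-tree callers, and the usual GCC-internal includes are assumed):

/* Consider a copy "cheap" if it needs at most four piecewise moves.  */
static bool
cheap_piecewise_copy_p (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return move_by_pieces_ninsns (len, align, MOVE_MAX_PIECES + 1) <= 4;
}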

rtx negate_rtx ( enum  machine_mode,
rtx   
)
Functions from expmed.c:   
Arguments MODE, RTX: return an rtx for the negation of that value.
   May emit insns.   
rtx offset_address ( rtx  ,
rtx  ,
unsigned  HOST_WIDE_INT 
)
Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).   
rtx prepare_call_address ( tree  fndecl,
rtx  funexp,
rtx  static_chain_value,
rtx *  call_fusage,
int  reg_parm_seen,
int  sibcallp 
)
Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.   

References emit_move_insn(), force_not_mem(), force_reg(), targetm, and use_reg().

Referenced by emit_library_call_value_1(), expand_builtin_apply(), and expand_call().

void probe_stack_range ( HOST_WIDE_INT  ,
rtx   
)
Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.   
enum machine_mode promote_decl_mode ( const_tree  ,
int *   
)
Return mode and signedness to use when object is promoted.   
enum machine_mode promote_function_mode ( const_tree  type,
enum machine_mode  mode,
int *  punsignedp,
const_tree  funtype,
int  for_return 
)
Return mode and signedness to use when an argument or result in the
   given mode is promoted.   
Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.   

References targetm.

Referenced by assign_parm_find_data_types(), assign_parm_setup_reg(), emit_library_call_value_1(), expand_call(), expand_expr_real_1(), expand_function_end(), expand_value_return(), initialize_argument_information(), promote_decl_mode(), and setup_incoming_promotions().

enum machine_mode promote_mode ( const_tree  type,
enum machine_mode  mode,
int *  punsignedp 
)
Return mode and signedness to use when an object in the given mode
   is promoted.   
Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.   

References targetm.

Referenced by assign_temp(), default_promote_function_mode(), default_promote_function_mode_always_promote(), expand_cond_expr_using_cmove(), precompute_arguments(), and promote_decl_mode().
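
A usage sketch (hypothetical helper, usual GCC-internal includes assumed):
ask the target what mode it actually stores a scalar of TYPE in, e.g. a
sub-word integer widened to word_mode on some machines.

/* Mode used to hold a scalar of TYPE after target promotion.  */
static enum machine_mode
storage_mode_for (tree type)
{
  int unsignedp = TYPE_UNSIGNED (type);
  return promote_mode (type, TYPE_MODE (type), &unsignedp);
}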

rtx push_block ( rtx  ,
int  ,
int   
)
Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.   
int safe_from_p ( const_rtx  ,
tree  ,
int   
)
void set_mem_attributes ( rtx  ,
tree  ,
int   
)
Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.   
void set_mem_attributes_minus_bitpos ( rtx  ref,
tree  t,
int  objectp,
HOST_WIDE_INT  bitpos 
)
Similar, except that BITPOS has not yet been applied to REF, so if
   we alter MEM_OFFSET according to T then we should subtract BITPOS
   expecting that it'll be added back in later.   
Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.   

References mem_attrs::addrspace, mem_attrs::alias, mem_attrs::align, array_ref_element_size(), array_ref_low_bound(), component_uses_parent_alias_set(), mem_attrs::expr, get_alias_set(), get_base_address(), get_object_alignment_1(), host_integerp(), HOST_WIDE_INT, integer_zerop(), memset(), mem_attrs::offset, mem_attrs::offset_known_p, set_mem_attrs(), mem_attrs::size, mem_attrs::size_known_p, tree_could_trap_p(), tree_low_cst(), and type().

Referenced by expand_assignment(), and set_mem_attributes().

rtx set_storage_via_libcall ( rtx  ,
rtx  ,
rtx  ,
bool   
)
The same, but always output a library call.   
bool set_storage_via_setmem ( rtx  object,
rtx  size,
rtx  val,
unsigned int  align,
unsigned int  expected_align,
HOST_WIDE_INT  expected_size 
)
rtx set_user_assembler_libfunc ( const char *  ,
const char *   
)
bool shift_return_value ( enum  machine_mode,
bool  ,
rtx   
)
bool split_comparison ( enum rtx_code  code,
enum machine_mode  mode,
enum rtx_code *  code1,
enum rtx_code *  code2 
)
Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.   

Referenced by do_compare_rtx_and_jump(), and emit_store_flag().
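
A usage sketch (hypothetical helper, usual GCC-internal includes assumed):
only the interpretation of the return value is shown; an actual caller such
as do_compare_rtx_and_jump goes on to emit one branch per sub-comparison.

/* Decompose CODE for MODE and note how the two halves combine.  */
static void
show_split (enum rtx_code code, enum machine_mode mode)
{
  enum rtx_code first, second;
  if (split_comparison (code, mode, &first, &second))
    {
      /* The condition holds only if both FIRST and SECOND hold (AND).  */
    }
  else
    {
      /* The condition holds if either FIRST or SECOND holds (OR).  */
    }
}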

tree std_build_builtin_va_list ( void  )
The "standard" definition of va_list is void*.   
tree std_canonical_va_list_type ( tree  )
void std_expand_builtin_va_start ( tree  ,
rtx   
)
tree std_fn_abi_va_list ( tree  )
void store_bit_field ( rtx  str_rtx,
unsigned HOST_WIDE_INT  bitsize,
unsigned HOST_WIDE_INT  bitnum,
unsigned HOST_WIDE_INT  bitregion_start,
unsigned HOST_WIDE_INT  bitregion_end,
enum machine_mode  fieldmode,
rtx  value 
)
Generate code to store value from rtx VALUE
   into a bit-field within structure STR_RTX
   containing BITSIZE bits starting at bit BITNUM.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
These two fields are 0 if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   FIELDMODE is the machine-mode of the FIELD_DECL node for this field.   

References get_best_mode(), HOST_WIDE_INT, offset, and store_bit_field_1().

Referenced by copy_blkmode_from_reg(), copy_blkmode_to_reg(), emit_group_store(), expand_assignment(), noce_emit_move_insn(), store_expr(), store_field(), store_unaligned_arguments_into_pseudos(), and write_complex_part().
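
A usage sketch (hypothetical helper and field layout, usual GCC-internal
includes assumed): the mirror image of the extract_bit_field example above,
with no C++ memory-model bitfield region.

/* Store VAL into the 3-bit field that starts at bit 5 of MEM.  */
static void
write_small_field (rtx mem, rtx val)
{
  store_bit_field (mem, 3 /* bitsize */, 5 /* bitnum */,
                   0, 0 /* no bitregion */, QImode /* fieldmode */, val);
}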

rtx store_by_pieces ( rtx  to,
unsigned HOST_WIDE_INT  len,
rtx(*)(void *, HOST_WIDE_INT, enum machine_mode)  constfun,
void *  constfundata,
unsigned int  align,
bool  memsetp,
int  endp 
)
Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO (a MEM rtx in BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is the maximum alignment we can assume.  MEMSETP is true if this
   is a memset operation and false if it's a copy of a constant string.
   If ENDP is 0, return TO; if ENDP is 1, return memory at the end ala
   mempcpy; and if ENDP is 2, return memory the end minus one byte ala
   stpcpy.   

References store_by_pieces_d::autinc_to, store_by_pieces_d::constfun, store_by_pieces_d::constfundata, copy_to_mode_reg(), emit_insn(), store_by_pieces_d::explicit_inc_to, gen_add2_insn(), get_address_mode(), store_by_pieces_d::len, len, store_by_pieces_d::offset, plus_constant(), store_by_pieces_d::reverse, store_by_pieces_1(), store_by_pieces_d::to, and store_by_pieces_d::to_addr.

Referenced by expand_builtin_memcpy(), expand_builtin_mempcpy_args(), expand_builtin_memset_args(), expand_builtin_strncpy(), and store_expr().
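
A usage sketch (hypothetical helpers, usual GCC-internal includes assumed):
CONSTFUN just has to hand back the bytes for each piece in the requested
mode; real callers check can_store_by_pieces first.

/* CONSTFUN returning an all-zero piece in whatever mode is asked for.  */
static rtx
zero_piece (void *data ATTRIBUTE_UNUSED, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
            enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

/* Clear LEN bytes of the BLKmode MEM in TO; ENDP == 0 returns TO itself.  */
static rtx
clear_via_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  return store_by_pieces (to, len, zero_piece, NULL, align,
                          true /* memsetp */, 0 /* endp */);
}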

rtx store_expr ( tree  ,
rtx  ,
int  ,
bool   
)
Generate code for computing expression EXP,
   and storing the value into TARGET.
   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.   
tree string_constant ( tree  ,
tree  
)
Return the tree node and offset if a given argument corresponds to
   a string constant.   
int try_casesi ( tree  index_type,
tree  index_expr,
tree  minval,
tree  range,
rtx  table_label,
rtx  default_label,
rtx  fallback_label,
int  default_probability 
)
Two different ways of generating switch statements.   
Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.   

References convert_to_mode(), create_convert_operand_from_type(), create_fixed_operand(), create_input_operand(), do_pending_stack_adjust(), emit_cmp_and_jump_insns(), expand_jump_insn(), expand_normal(), lang_hooks_for_types::type_for_mode, and lang_hooks::types.

Referenced by emit_case_dispatch_table().
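
A usage sketch (hypothetical helper, usual GCC-internal includes assumed):
try the casesi pattern first and leave room for a fallback, as
emit_case_dispatch_table does.

/* Dispatch on INDEX_EXPR; return true if a casesi sequence was emitted.  */
static bool
emit_casesi_dispatch (tree index_type, tree index_expr, tree minval,
                      tree range, rtx table_label, rtx default_label)
{
  if (try_casesi (index_type, index_expr, minval, range,
                  table_label, default_label, NULL_RTX,
                  -1 /* default_probability unknown */))
    return true;
  /* No casesi instruction on this target; a real caller would try
     try_tablejump or fall back to a decision tree here.  */
  return false;
}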

int try_tablejump ( tree  ,
tree  ,
tree  ,
tree  ,
rtx  ,
rtx  ,
int   
)
void update_nonlocal_goto_save_area ( void  )
Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.   

References cfun, emit_stack_save(), expand_expr(), EXPAND_WRITE, function::nonlocal_goto_save_area, and SAVE_NONLOCAL.

Referenced by allocate_dynamic_stack_space(), expand_call(), and expand_function_start().

rtx use_anchored_address ( rtx  )
void use_group_regs ( rtx ,
rtx   
)
Mark a PARALLEL as holding a parameter for the next CALL_INSN.   
static inline void use_reg ( )
Mark REG as holding a parameter for the next CALL_INSN.   

References use_reg_mode().

Referenced by emit_library_call_value_1(), expand_builtin_apply(), expand_call(), forward_propagate_subreg(), prepare_call_address(), use_group_regs(), and use_regs().

void use_reg_mode ( rtx ,
rtx  ,
enum  machine_mode 
)
Mark REG as holding a parameter for the next CALL_INSN.
   Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode.   
void use_regs ( rtx ,
int  ,
int   
)
Mark NREGS consecutive regs, starting at REGNO, as holding parameters
   for the next CALL_INSN.   
rtx validize_mem ( rtx  )
Return a memory reference like MEMREF, but which is known to have a
   valid address.   
rtx widen_memory_access ( rtx  ,
enum  machine_mode,
HOST_WIDE_INT   
)
Definitions from emit-rtl.c  
Return a memory reference like MEMREF, but with its mode widened to
   MODE and adjusted by OFFSET.   

Variable Documentation

tree block_clear_fn
A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.   

Referenced by clear_storage_libcall_fn(), and scan_insn().