GCC Middle and Back End API Reference
calls.c File Reference

Data Structures

struct  arg_data

Functions

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT, rtx, rtx, int, rtx, int, cumulative_args_t)
static void precompute_register_parameters (int, struct arg_data *, int *)
static int store_one_arg (struct arg_data *, rtx, int, int, int)
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int)
static int finalize_must_preallocate (int, int, struct arg_data *, struct args_size *)
static void precompute_arguments (int, struct arg_data *)
static int compute_argument_block_size (int, struct args_size *, tree, tree, int)
static void initialize_argument_information (int, struct arg_data *, struct args_size *, int, tree, tree, tree, tree, cumulative_args_t, int, rtx *, int *, int *, int *, bool *, bool)
static void compute_argument_addresses (struct arg_data *, rtx, int)
static rtx rtx_for_function_call (tree, tree)
static void load_register_parameters (struct arg_data *, int, rtx *, int, int, int *)
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type, enum machine_mode, int, va_list)
static int special_function_p (const_tree, int)
static int check_sibcall_argument_overlap_1 (rtx)
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int)
static int combine_pending_stack_adjustment_and_call (int, struct args_size *, unsigned int)
static tree split_complex_types (tree)
static rtx save_fixed_argument_area (int, rtx, int *, int *)
static void restore_fixed_argument_area (rtx, rtx, int, int)
rtx prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value, rtx *call_fusage, int reg_parm_seen, int sibcallp)
static int special_function_p ()
static int decl_return_flags ()
int setjmp_call_p ()
bool gimple_alloca_call_p ()
bool alloca_call_p ()
static bool is_tm_builtin ()
int flags_from_decl_or_type ()
int call_expr_flags ()
static rtx save_fixed_argument_area ()
static void restore_fixed_argument_area ()
static void store_unaligned_arguments_into_pseudos ()
static void precompute_arguments ()
static void compute_argument_addresses ()
static rtx rtx_for_function_call ()
static rtx internal_arg_pointer_based_exp (rtx, bool)
static void internal_arg_pointer_based_exp_scan ()
static int internal_arg_pointer_based_exp_1 ()
static rtx internal_arg_pointer_based_exp ()
static bool mem_overlaps_already_clobbered_arg_p ()
static int check_sibcall_argument_overlap_1 ()
static int check_sibcall_argument_overlap ()
bool shift_return_value ()
static rtx avoid_likely_spilled_reg ()
rtx expand_call ()
void fixup_tail_calls ()
static tree split_complex_types ()
void emit_library_call (rtx orgfun, enum libcall_type fn_type, enum machine_mode outmode, int nargs,...)
rtx emit_library_call_value (rtx orgfun, rtx value, enum libcall_type fn_type, enum machine_mode outmode, int nargs,...)
bool must_pass_in_stack_var_size (enum machine_mode mode, const_tree type)
bool must_pass_in_stack_var_size_or_pad ()

Variables

static char * stack_usage_map
static int highest_outgoing_arg_in_use
static sbitmap stored_args_map
static int stack_arg_under_construction
struct {
   rtx   scan_start
   vec< rtx >   cache
}   internal_arg_pointer_exp_state

Function Documentation

bool alloca_call_p ( )
   Return true when EXP contains an alloca call.  
static rtx avoid_likely_spilled_reg ( )
static
   If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  
         Make sure that we generate a REG rather than a CONCAT.
         Moves into CONCATs can need nontrivial instructions,
         and the whole point of this function is to avoid
         using the hard register directly in such a situation.  
int call_expr_flags ( )
   Detect flags from a CALL_EXPR.  

References emit_group_load_into_temps(), int_size_in_bytes(), arg_data::parallel_value, and targetm.

static int check_sibcall_argument_overlap ( rtx  ,
struct arg_data *,
int   
)
static
static int check_sibcall_argument_overlap ( )
static
   Scan the sequence after INSN to check whether it dereferences any argument
   slots we already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, add stack slots for ARG
   to the stored_args_map bitmap afterwards (when ARG is a register,
   MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the sequence after
   INSN dereferences such argument slots, zero otherwise.  
static int check_sibcall_argument_overlap_1 ( rtx  )
static
static int check_sibcall_argument_overlap_1 ( )
static
   Scan expression X to check whether it dereferences any argument slots we
   already clobbered by tail call arguments (as noted in the stored_args_map
   bitmap).
   Return nonzero if X dereferences such argument slots,
   zero otherwise.  
     We need not check the operands of the CALL expression itself.  
     Scan all subexpressions.  
static int combine_pending_stack_adjustment_and_call ( int  unadjusted_args_size,
struct args_size *args_size,
unsigned int  preferred_unit_stack_boundary 
)
static
   We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  
     The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  
     The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  
     We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  
     Begin by trying to pop all the bytes.  
     Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  
     Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  
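
   The arithmetic described above can be condensed into a short sketch.  This
   is illustrative only, not the exact body of the function; it assumes the
   usual pending_stack_adjust and stack_pointer_delta globals and the
   args_size::constant field referenced elsewhere on this page.

/* Condensed, illustrative sketch of the adjustment computation described
   above (not the exact body of combine_pending_stack_adjustment_and_call).  */
static int
combine_pending_stack_adjustment_and_call_sketch (int unadjusted_args_size,
                                                  struct args_size *args_size,
                                                  unsigned int preferred_unit_stack_boundary)
{
  /* How misaligned the stack would be if we pushed the arguments now,
     with no adjustment at all.  */
  int misalign = (stack_pointer_delta + unadjusted_args_size)
                 % (int) preferred_unit_stack_boundary;

  /* Begin by trying to pop every pending byte...  */
  int adjustment = pending_stack_adjust;
  misalign -= pending_stack_adjust % (int) preferred_unit_stack_boundary;

  /* ...then hold back just enough bytes that the argument pushes will
     land on an aligned stack.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (misalign > 0)
        adjustment -= preferred_unit_stack_boundary - misalign;
      else
        adjustment += misalign;
    }

  /* Whatever we held back, plus the argument bytes themselves, must be
     popped after the call returns.  */
  args_size->constant = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}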
static void compute_argument_addresses ( struct arg_data *,
rtx  ,
int   
)
static
static void compute_argument_addresses ( )
static
   If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  
             Skip this parm if it will not be passed on the stack.  
                 Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  
                 Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.
             Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  

References current_function_decl.

static int compute_argument_block_size ( int  reg_parm_stack_space,
struct args_size *args_size,
tree  fndecl,
tree  fntype,
int  preferred_stack_boundary 
)
static
   Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  
     For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  
     Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  
             We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  
             The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  

References convert_modes(), expand_normal(), gen_lowpart_SUBREG(), arg_data::initial_value, promote_mode(), type(), arg_data::unsignedp, and arg_data::value.

static int decl_return_flags ( )
static
   Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  
static void emit_call_1 ( rtx  funexp,
tree  fntree,
tree  fndecl,
tree  funtype,
HOST_WIDE_INT  stack_size,
HOST_WIDE_INT  rounded_stack_size,
HOST_WIDE_INT  struct_value_size,
rtx  next_arg_reg,
rtx  valreg,
int  old_inhibit_defer_pop,
rtx  call_fusage,
int  ecf_flags,
cumulative_args_t  args_so_far 
)
static
   Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  
     Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  
         Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry. 
         If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  
     If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  
         If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  
     Find the call we just emitted.  
     Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  
     Put the register usage information there.  
     If this is a const call, then set the insn's unchanging bit.  
     If this is a pure call, then set the insn's unchanging bit.  
     If this is a const call, then set the insn's unchanging bit.  
     Create a nothrow REG_EH_REGION note, if needed.  
     Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  
         If popping is needed, stack realignment must use DRAP  
     For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  
         If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  
               Just pretend we did the pop.  
     When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On  i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  

References BUILT_IN_NORMAL, builtin_decl_explicit(), and set_mem_expr().

void emit_library_call ( rtx  orgfun,
enum libcall_type  fn_type,
enum machine_mode  outmode,
int  nargs,
  ... 
)
   Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
   `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
   other types of library calls.  

Referenced by maybe_emit_sync_lock_test_and_set().
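
   As a usage illustration (not taken from calls.c): a backend expander might
   emit a call to a two-operand helper as below.  my_si_libfunc, op0 and op1
   are hypothetical names for a SYMBOL_REF and two prepared SImode operands.

/* Hypothetical example: call a void helper taking two SImode arguments.
   The varargs are alternating (rtx value, machine_mode) pairs, as the
   description above states.  */
emit_library_call (my_si_libfunc, LCT_NORMAL, VOIDmode,
                   2, op0, SImode, op1, SImode);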

rtx emit_library_call_value ( rtx  orgfun,
rtx  value,
enum libcall_type  fn_type,
enum machine_mode  outmode,
int  nargs,
  ... 
)
   Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  
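
   A matching sketch for the value-returning variant, with the same
   hypothetical operands as above; passing NULL_RTX for VALUE lets the
   function pick where the result lives.

/* Hypothetical example: same helper, but producing an SImode result.  */
rtx result = emit_library_call_value (my_si_libfunc, NULL_RTX, LCT_CONST,
                                      SImode, 2, op0, SImode, op1, SImode);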
static rtx emit_library_call_value_1 ( int  retval,
rtx  orgfun,
rtx  value,
enum libcall_type  fn_type,
enum machine_mode  outmode,
int  nargs,
va_list  p 
)
static
   Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether the return value needs to be saved;
   the other parameters are documented with the emit_library_call function above.  
     Total size in bytes of all the stack-parms scanned so far.  
     Size of arguments before any adjustments (such as rounding).  
     TODO: choose the correct decl type of orgfun.  Sadly this information
     isn't present here, so we default to the native calling ABI here.  
     Define the boundary of the register parm stack space that needs to be
     saved, if any.  
     Size of the stack reserved for parameter registers.  
     By default, library functions can not throw.  
     Ensure current function's preferred stack boundary is at least
     what we need.  
     If this kind of value comes back in memory,
     decide where in memory it should come back.  
             This call returns a big structure.  
     ??? Unfinished: must pass the memory address as an argument.  
     Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  
     If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  
         Make sure it is a reasonable operand for a move or push insn.  
         We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  
         Make sure it is a reasonable operand for a move or push insn.  
             If this was a CONST function, it is now PURE since it now
             reads memory.  
           The argument is passed entirely in registers.  See at which
           end it should be padded.  
     If this machine requires an external definition for library
     functions, write one out.  
         Since the stack pointer will never be pushed, it is possible for
         the evaluation of a parm to clobber something we have already
         written to the stack.  Since most function calls on RISC machines
         do not use the stack, this is uncommon, but must work correctly.

         Therefore, we save any area of the stack that was already written
         and that we are using.  Here we set up to do this by making a new
         stack usage map from the old one.

         Another approach might be to try to reorder the argument
         evaluations to avoid this conflicting stack usage.  
         Since we will be writing into the entire argument area, the
         map must be allocated for its entire size, not just the part that
         is the responsibility of the caller.  
         We must be careful to use virtual regs before they're instantiated,
         and real regs afterwards.  Loop optimization, for example, can create
         new libcalls after we've instantiated the virtual regs, and if we
         use virtuals anyway, they won't match the rtl patterns.  
     If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  
         The argument list is the property of the called routine and it
         may clobber it.  If the fixed area has been used for previous
         parameters, we must save and restore it.  
     Push the args that need to be pushed.  
     ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  
                 If this is being stored into a pre-allocated, fixed-size,
                 stack area, save any previous data at that location.  
                 stack_slot is negative, but we want to index stack_usage_map
                 with positive values.  
                 Don't worry about things in the fixed argument area;
                 it has already been saved.  
                     We need to make a save area.  
             Now mark the segment we just used.  
             Indicate argument access so that alias.c knows that these
             values are live.  
               When arguments are pushed, trying to tell alias.c where
               exactly this argument is won't work, because the
               auto-increment causes confusion.  So we merely indicate
               that we access something with a known mode somewhere on
               the stack.  
     If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  
     Now load any reg parms into their regs.  
     ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  
         Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  
             Copied from load_register_parameters.  
             Handle case where we have a value that needs shifting
              up to the msb, e.g. a QImode value and we're padding
             upward on a BYTES_BIG_ENDIAN machine.  
                 Assigning REG here rather than a temp makes CALL_FUSAGE
                 report the whole reg as used.  Strictly speaking, the
                 call only uses SIZE bytes at the msb end, but it doesn't
                 seem worth generating rtl to say that.  
     Any regs containing parms remain in use through the call.  
     Pass the function the address in which to return a structure value.  
     Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  
     Stack must be properly aligned now.  
     We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  
     The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  
     Right-shift returned value if necessary.  
     For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  
         The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  
             There was no CALL_INSN?  
     Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
     and LCT_RETURNS_TWICE, cannot perform non-local gotos.  
             There was no CALL_INSN?  
     Now restore inhibit_defer_pop to its actual original value.  
     Copy the value to the right place.  
             Convert to the proper mode if a promotion has been active.  
         If we saved any argument areas, restore them.  

References args_size::constant, highest_outgoing_arg_in_use, memcpy(), memset(), plus_constant(), stack_usage_map, and virtuals_instantiated.

rtx expand_call ( )
   Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  
     Nonzero if we are currently expanding a call.  
     RTX for the function to be called.  
     Sequence of insns to perform a normal "call".  
     Sequence of insns to perform a tail "call".  
     Data type of the function.  
     Declaration of the function being called,
     or 0 if the function is computed (not known by name).  
     The type of the function being called.  
     Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  
     Address where we should return a BLKmode value;
     0 if value not BLKmode.  
     Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  
     Holds the value of implicit argument for the struct value.  
     Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  
     Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  
     Number of actual parameters in this call, including struct value addr.  
     Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  
     Number of complex actual arguments that need to be split.  
     Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  
     Total size in bytes of all the stack-parms scanned so far.  
     Size of arguments before any adjustments (such as rounding).  
     Data on reg parms scanned so far.  
     Nonzero if a reg parm has been scanned.  
     Nonzero if this is an indirect function call.  
     Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  
     Size of the stack reserved for parameter registers.  
     Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  
     Mask of ECF_ and ERF_ flags.  
     Define the boundary of the register parm stack space that needs to be
     saved, if any.  
     State variables to track stack modifications.  
     Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  
     The alignment of the stack, in bits.  
     The alignment of the stack, in bytes.  
     The static chain value to use for this call.  
     See if this is "nothrow" function call.  
     See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  
     Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  
     If the result of a non looping pure or const function call is
     ignored (or void), and none of its arguments are volatile, we can
     avoid expanding the call and just evaluate the arguments for
     side-effects.  
     Set up a place to return a structure.  
     Cater to broken compilers.  
         This call returns a big structure.  
               For variable-sized objects, we must be called with a target
               specified.  If we were to allocate space on the stack here,
               we would have no way of knowing when to free it.  
     Figure out the amount to which the stack should be aligned.  
         Without automatic stack alignment, we can't increase preferred
         stack boundary.  With automatic stack alignment, it is
         unnecessary since unless we can guarantee that all callers will
         align the outgoing stack properly, callee has to align its
         stack anyway.  
     Operand 0 is a pointer-to-function; get the type of the function.  
     Count whether there are actual complex arguments that need to be split
     into their real and imaginary parts.  Munge the type_arg_types
     appropriately here as well.  
     If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  Put the argument expression
     in structure_value_addr_value.  
         If structure_value_addr is a REG other than
         virtual_outgoing_args_rtx, we can always use it.  If it
         is not a REG, we must always copy it into a register.
         If it is virtual_outgoing_args_rtx, we must copy it to another
         register in some cases.  
     Count the arguments and set NUM_ACTUALS.  
     Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  
            Count the struct value address, if it is passed as a parm.  
       If we know nothing, treat all args as named.  
     Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  
     Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitting for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  
       Don't include the last named arg.  
       Treat all args as named.  
     Make a vector to hold all the information about each arg.  
     Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  
     Now make final decision about preallocating stack space.  
     If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  
     Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there's cleanups, as we know there's code to follow the call.  
      Rest of purposes for tail call optimizations to fail.  
         Doing sibling call optimization needs some work, since
         structure_value_addr can be allocated on the stack.
         It does not seem worth the effort since few optimizable
         sibling calls will return a structure.  
         If outgoing reg parm stack space changes, we cannot do a sibcall.  
         Check whether the target is able to optimize the call
         into a sibcall.  
         Functions that do not return exactly once may not be sibcall
         optimized.  
         If the called function is nested in the current one, it might access
         some of the caller's arguments, but could clobber them beforehand if
         the argument areas are shared.  
         If this function requires more stack slots than the current
         function, we cannot change it into a sibling call.
         crtl->args.pretend_args_size is not part of the
         stack allocated by our caller.  
         If the callee pops its own arguments, then it must pop exactly
         the same number of arguments as the current function.  
     Check if caller and callee disagree in promotion of function
     return value.  
     Ensure current function's preferred stack boundary is at least
     what we need.  Stack alignment may also increase preferred stack
     boundary.  
     We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  
         We want to emit any pending stack adjustments before the tail
         recursion "call".  That way we know any adjustment after the tail
         recursion call can be ignored if we indeed use the tail
         call expansion.  
             State variables we need to save and restore between
             iterations.  
         Other state variables that we must reinitialize each time
         through the loop (that are not initialized by the loop itself).  
         Start a new sequence for the normal call case.

         From this point on, if the sibling call fails, we want to set
         sibcall_failure instead of continuing the loop.  
         Don't let pending stack adjusts add up to too much.
         Also, do all pending adjustments now if there is any chance
         this might be a call to alloca or if we are expanding a sibling
         call sequence.
         Also do the adjustments before a throwing call, otherwise
         exception handling can fail; PR 19225. 
         Precompute any arguments as needed.  
         Now we are about to start emitting insns that can be deleted
         if a libcall is deleted.  
         Compute the actual size of the argument block required.  The variable
         and constant sizes must be combined, the size may have to be rounded,
         and there may be a minimum required size.  When generating a sibcall
         pattern, do not round up, since we'll be re-using whatever space our
         caller provided.  
         The argument block when performing a sibling call is the
         incoming argument block.  
         If we have no actual push instructions, or shouldn't use them,
         make space for all args right now.  
                 stack_arg_under_construction says whether a stack arg is
                 being constructed at the old stack level.  Pushing the stack
                 gets a clean outgoing argument block.  
             Note that we must go through the motions of allocating an argument
             block even if the size is zero because we may be storing args
             in the area reserved for register arguments, which may be part of
             the stack frame.  
             Store the maximum argument space used.  It will be pushed by
             the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
             checking).  
                     Since the stack pointer will never be pushed, it is
                     possible for the evaluation of a parm to clobber
                     something we have already written to the stack.
                     Since most function calls on RISC machines do not use
                     the stack, this is uncommon, but must work correctly.

                     Therefore, we save any area of the stack that was already
                     written and that we are using.  Here we set up to do this
                     by making a new stack usage map from the old one.  The
                     actual save will be done by store_one_arg.

                     Another approach might be to try to reorder the argument
                     evaluations to avoid this conflicting stack usage.  
                     Since we will be writing into the entire argument area,
                     the map must be allocated for its entire size, not just
                     the part that is the responsibility of the caller.  
                     The address of the outgoing argument list must not be
                     copied to a register here, because argblock would be left
                     pointing to the wrong place after the call to
                     allocate_dynamic_stack_space below.  
                         Try to reuse some or all of the pending_stack_adjust
                         to get this space.  
                         combine_pending_stack_adjustment_and_call computes
                         an adjustment before the arguments are allocated.
                         Account for them and see whether or not the stack
                         needs to go up or down.  
                             We're releasing stack space.  
                             ??? We can avoid any adjustment at all if we're
                             already aligned.  FIXME.  
                           We need to allocate space.  We'll do that in
                           push_block below.  
                     Special case this because overhead of `push_block' in
                     this case is non-trivial.  
                     We only really need to call `copy_to_reg' in the case
                     where push insns are going to be used to pass ARGBLOCK
                     to a function call in ARGS.  In that case, the stack
                     pointer changes value from the allocation point to the
                     call point, and hence the value of
                     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
                     as well always do it.  
             The save/restore code in store_one_arg handles all
             cases except one: a constructor call (including a C
             function returning a BLKmode struct) to initialize
             an argument.  
                     stack_arg_under_construction says whether a stack
                     arg is being constructed at the old stack level.
                     Pushing the stack gets a clean outgoing argument
                     block.  
                     Make a new map for the new argument list.  
                 We can pass TRUE as the 4th argument because we just
                 saved the stack pointer and will restore it right after
                 the call.  
             If argument evaluation might modify the stack pointer,
             copy the address of the argument list to a register.  
         If we push args individually in reverse order, perform stack alignment
         before the first push (the last arg).  
             When the stack adjustment is pending, we get better code
             by combining the adjustments.  
         Now that the stack is properly aligned, pops can't safely
         be deferred during the evaluation of the arguments.  
         Record the maximum pushed stack space size.  We need to delay
         doing it this far to take into account the optimization done
         by combine_pending_stack_adjustment_and_call.  
         Figure out the register where the value, if any, will come back.  
             If VALREG is a PARALLEL whose first member has a zero
             offset, use that.  This is for targets such as m68k that
             return the same value in multiple places.  
         Precompute all register parameters.  It isn't safe to compute anything
         once we have started filling any specific hard regs.  
         Save the fixed argument area if it's part of the caller's frame and
         is clobbered by argument setup for this call.  
         Now store (and compute if necessary) all non-register parms.
         These come before register parms, since they can require block-moves,
         which could clobber the registers used for register parms.
         Parms which have partial registers are not stored here,
         but we do preallocate space here if they want that.  
         If we have a parm that is passed in registers but not in memory
         and whose alignment does not permit a direct copy into registers,
         make a group of pseudos that correspond to each register that we
         will later fill.  
         Now store any partially-in-registers parm.
         This is the last place a block-move can happen.  
         If we pushed args in forward order, perform stack alignment
         after pushing the last arg.  
         If register arguments require space on the stack and stack space
         was not preallocated, allocate stack space here for arguments
         passed in registers.  
         Pass the function the address in which to return a
         structure value.  
         Save a pointer to the last insn before the call, so that we can
         later safely search backwards to find the CALL_INSN.  
         Set up next argument register.  For sibling calls on machines
         with register windows this should be the incoming register.  
         All arguments and registers used for the call must be set up by
         now!  
         Stack must be properly aligned now.  
         Generate the actual call instruction.  
         If the call setup or the call itself overlaps with anything
         of the argument setup we probably clobbered our call address.
         In that case we can't do sibcalls.  
         If a non-BLKmode value is returned at the most significant end
         of a register, shift the register right by the appropriate amount
         and update VALREG accordingly.  BLKmode values are handled by the
         group load/store machinery below.  
             The return value from a malloc-like function is a pointer.  
             The return value from a malloc-like function can not alias
             anything else.  
             Write out the sequence.  
         For calls to `setjmp', etc., inform
         function.c:setjmp_warnings that it should complain if
         nonvolatile values are live.  For functions that cannot
         return, inform flow that control does not fall through.  
             The barrier must be emitted
             immediately after the CALL_INSN.  Some ports emit more
             than just a CALL_INSN above, so we must search for it here.  
                 There was no CALL_INSN?  
             Stack adjustments after a noreturn call are dead code.
             However when NO_DEFER_POP is in effect, we must preserve
             stack_pointer_delta.  
         If value type not void, return an rtx for the value.  
             This is the special C++ case where we need to
             know what the true target was.  We take care to
             never use this value more than once in one expression.  
         Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  
               Handle the result of an emit_group_move_into_temps
               call in the previous pass.  
             We have to copy a return value in a CLASS_LIKELY_SPILLED hard
             reg to a plain register.  
             If TARGET is a MEM in the argument area, and we have
             saved part of the argument area, then we can't store
             directly into TARGET as it may get overwritten when we
             restore the argument save area below.  Don't work too
             hard though and simply force TARGET to a register if it
             is a MEM; the optimizer is quite likely to sort it out.  
                 TARGET and VALREG cannot be equal at this point
                 because the latter would not have
                 REG_FUNCTION_VALUE_P true, while the former would if
                 it were referring to the same register.

                 If they refer to the same register, this move will be
                 a no-op, except when function inlining is being
                 done.  
                 If we are setting a MEM, this code must be executed.
                 Since it is emitted after the call insn, sibcall
                 optimization cannot be performed in that case.  
         If we promoted this return value, make the proper SUBREG.
         TARGET might be const0_rtx here, so be careful.  
             Ensure we promote as expected, and get the new unsignedness.  
         If size of args is variable or this was a constructor call for a stack
         argument, restore saved stack-pointer value.  
             If we saved any argument areas, restore them.  
         If this was alloca, record the new stack level for nonlocal gotos.
         Check for the handler slots since we might not have a save area
         for non-local gotos.  
         Free up storage we no longer need.  
             Restore the pending stack adjustment now that we have
             finished generating the sibling call sequence.  
             Prepare arg structure for next iteration.  
             Verify that we've deallocated all the stack we used.  
         If something prevents making this a sibling call,
         zero out the sequence.  
     If tail call production succeeded, we need to remove REG_EQUIV notes on
     arguments too, as argument area is now clobbered by the call.  

References expand_expr(), and EXPAND_NORMAL.

Referenced by expand_builtin_memset_args().
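
   For orientation, a typical caller (as the "Referenced by" line suggests, a
   builtin expander) simply falls back to expand_call when it cannot do
   better.  The function name below is hypothetical.

/* Hypothetical fallback path in a builtin expander.  */
static rtx
expand_builtin_foo (tree exp, rtx target, int ignore)
{
  /* ...try an inline expansion first; if that is not possible,
     expand EXP as an ordinary call.  */
  return expand_call (exp, target, ignore);
}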

static int finalize_must_preallocate ( int  must_preallocate,
int  num_actuals,
struct arg_data *args,
struct args_size *args_size 
)
static
   Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  
     See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  
void fixup_tail_calls ( void  )
   A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.  
         There are never REG_EQUIV notes for the incoming arguments
         after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  
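
   A sketch of the walk described above, assuming the standard insn-list
   accessors; the real function may differ in detail.

/* Sketch: drop REG_EQUIV notes for incoming arguments, stopping at the
   NOTE_INSN_FUNCTION_BEG note since no such notes appear after it.  */
rtx insn;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
  {
    if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
      break;
    if (INSN_P (insn))
      {
        rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
        if (note)
          remove_note (insn, note);
      }
  }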
int flags_from_decl_or_type ( )
   Detect flags (function attributes) from the function decl or type node.  
         The function exp may have the `malloc' attribute.  
         The function exp may have the `returns_twice' attribute.  
         Process the pure and const attributes.  

Referenced by dump_possible_polymorphic_call_targets(), ipa_reference_get_not_read_global(), and set_reference_optimization_summary().
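
   A sketch of the decl-side checks hinted at above; this shows only the shape
   of the test, not the complete set of flags the real function computes.  The
   variable exp stands for the decl or type node being inspected.

/* Sketch: map a few well-known decl attributes to ECF_* bits.  */
int flags = 0;
if (DECL_P (exp))
  {
    if (DECL_IS_MALLOC (exp))
      flags |= ECF_MALLOC;           /* `malloc' attribute */
    if (DECL_IS_RETURNS_TWICE (exp))
      flags |= ECF_RETURNS_TWICE;    /* `returns_twice' attribute */
    if (TREE_READONLY (exp))
      flags |= ECF_CONST;            /* `const' */
    else if (DECL_PURE_P (exp))
      flags |= ECF_PURE;             /* `pure' */
    if (TREE_NOTHROW (exp))
      flags |= ECF_NOTHROW;
  }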

bool gimple_alloca_call_p ( )
   Return true if STMT is an alloca call.  
static void initialize_argument_information ( int  num_actuals,
struct arg_data *args,
struct args_size *args_size,
int  n_named_args,
tree  exp,
tree  struct_value_addr_value,
tree  fndecl,
tree  fntype,
cumulative_args_t  args_so_far,
int  reg_parm_stack_space,
rtx *old_stack_level,
int *  old_pending_adj,
int *  must_preallocate,
int *  ecf_flags,
bool *  may_tailcall,
bool  call_from_thunk_p 
)
static
   Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  
     1 if scanning parms front to back, -1 if scanning back to front.  
     Count arg position in order args appear.  
     In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  
         In this case, must reverse order of args
         so that we compute and push the last arg first.  
     First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  
     I counts args in order (to be) pushed; ARGPOS counts in order written.  
         Replace erroneous argument with constant zero.  
         If TYPE is a transparent union or record, pass things the way
         we would pass the first field of the union or record.  We have
         already verified that the modes are the same.  
         Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  
         See if this argument should be passed by invisible reference.  
             If we're compiling a thunk, pass through invisible references
             instead of making a copy.  
                 We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  
                 We make a copy of the object and pass the address to the
                 function being called.  
                     This is a variable-sized object.  Make space on the stack
                     for it.  
                     We can pass TRUE as the 4th argument because we just
                     saved the stack pointer and will restore it right after
                     the call.  
                 Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  
         If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  
         If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  
         If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  
         Compute the stack-size of this argument.  
           The argument is passed entirely in registers.  See at which
           end it should be padded.  
         Update ARGS_SIZE, the total stack space for args so far.  
         Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  

References allocate_dynamic_stack_space(), assign_temp(), build_fold_addr_expr_loc(), compare_tree_int(), args_size::constant, copy(), emit_stack_save(), expr_size(), first_field(), gen_rtx_MEM(), GENERIC_STACK_CHECK, get_base_address(), int_size_in_bytes(), arg_data::locate, locate_and_pad_parm(), make_tree(), mark_addressable(), arg_data::mode, arg_data::partial, pass_by_reference(), arg_data::pass_on_stack, promote_function_mode(), reference_callee_copied(), arg_data::reg, SAVE_BLOCK, set_mem_attributes(), locate_and_pad_arg_data::size, store_expr(), arg_data::tail_call_reg, targetm, arg_data::tree_value, arg_data::unsignedp, and locate_and_pad_arg_data::where_pad.

static rtx internal_arg_pointer_based_exp ( rtx  ,
bool   
)
static

Referenced by rtx_for_function_call().

static rtx internal_arg_pointer_based_exp ( )
static
   Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
   NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
   it with fixed offset, or PC if this is with variable or unknown offset.
   TOPLEVEL is true if the function is invoked at the topmost level.  
     When called at the topmost level, scan pseudo assignments in between the
     last scanned instruction in the tail call sequence and the latest insn
     in that sequence.  

References int_size_in_bytes().
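
   The three possible return values translate into the following caller-side
   interpretation (a sketch; addr is an assumed rtx address, and within this
   file mem_overlaps_already_clobbered_arg_p is the user of this contract).

/* Sketch: classify an address with respect to the incoming argument area.  */
rtx base = internal_arg_pointer_based_exp (addr, true);
if (base == NULL_RTX)
  ;  /* ADDR is not based on crtl->args.internal_arg_pointer at all.  */
else if (CONST_INT_P (base))
  ;  /* Based on it at the fixed offset INTVAL (base).  */
else
  ;  /* base == pc_rtx: based on it at a variable or unknown offset.  */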

static int internal_arg_pointer_based_exp_1 ( )
static
   Helper function for internal_arg_pointer_based_exp, called through
   for_each_rtx.  Return 1 if *LOC is a register based on
   crtl->args.internal_arg_pointer.  Return -1 if *LOC is not based on it
   and the subexpressions need not be examined.  Otherwise return 0.  
static void internal_arg_pointer_based_exp_scan ( )
static
   Helper function for internal_arg_pointer_based_exp.  Scan insns in
   the tail call sequence, starting with first insn that hasn't been
   scanned yet, and note for each pseudo on the LHS whether it is based
   on crtl->args.internal_arg_pointer or not, and what offset from that
   that pointer it has.  
             Punt on pseudos set multiple times.  
static bool is_tm_builtin ( )
static
   Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  
static void load_register_parameters ( struct arg_data *args,
int  num_actuals,
rtx *call_fusage,
int  flags,
int  is_sibcall,
int *  sibcall_failure 
)
static
   Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  
             Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  
             Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  
             If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  
                 Handle case where we have a value that needs shifting
                  up to the msb, e.g. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  
                     Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  
             If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  
                 Check for overlap with already clobbered argument area,
                 providing that this has non-zero size.  
                 Handle a BLKmode that needs shifting.  
             When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  
             Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  
static bool mem_overlaps_already_clobbered_arg_p ( )
static
   Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  

References emit_move_insn(), expand_shift(), gen_reg_rtx(), gen_rtx_REG(), move_block_to_reg(), operand_subword_force(), locate_and_pad_arg_data::size, validize_mem(), and word_mode.
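
   A sketch of the bitmap test this predicate boils down to, once the address
   has been reduced (via internal_arg_pointer_based_exp) to a constant byte
   offset OFF into the incoming argument area.  The sbitmap accessor names are
   assumptions about this GCC version, and the real function handles several
   more cases.

/* Sketch: does [OFF, OFF + SIZE) touch any byte already recorded in
   stored_args_map as clobbered by tail-call argument stores?  */
static bool
overlaps_clobbered_area_p (HOST_WIDE_INT off, HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;

  if (off < 0 || size <= 0)
    return true;                /* be conservative about odd inputs */

  for (i = off; i < off + size; i++)
    if (i < (HOST_WIDE_INT) SBITMAP_SIZE (stored_args_map)
        && bitmap_bit_p (stored_args_map, i))
      return true;              /* byte I was already clobbered */

  return false;
}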

bool must_pass_in_stack_var_size ( enum machine_mode  mode,
const_tree  type 
)
   Nonzero if we do not know how to pass TYPE solely in registers.  
     If the type has variable size...  
     If the type is marked as addressable (it is required
     to be constructed into the stack)...  
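
   The two bullet points above amount to the following check (a sketch; the
   _or_pad variant documented next adds a further test on BLKmode padding).

/* Sketch of the checks described above: TYPE cannot live purely in
   registers if its size is not a compile-time constant or if it must be
   constructed in memory.  */
bool
must_pass_in_stack_var_size_sketch (enum machine_mode mode ATTRIBUTE_UNUSED,
                                    const_tree type)
{
  if (!type)
    return false;

  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;                /* variable-sized type */

  if (TREE_ADDRESSABLE (type))
    return true;                /* must be constructed on the stack */

  return false;
}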
bool must_pass_in_stack_var_size_or_pad ( )
   Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  
   ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  
     If the type has variable size...  
     If the type is marked as addressable (it is required
     to be constructed into the stack)...  
     If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  
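
   The variable-size and addressability checks are the same as in the sketch
   above; the additional padding test inserted before the final return looks
   roughly like the fragment below (PARM_BOUNDARY, FUNCTION_ARG_PADDING and
   BYTES_BIG_ENDIAN are the usual target macros; treat this as a sketch):

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
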
static void precompute_arguments ( int  ,
struct arg_data  
)
static
static void precompute_arguments ( )
static
   Precompute parameters as needed for a function call.

   FLAGS is a mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  
     If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  
     If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such cases by saving the outgoing stack arguments, but it results in
     worse code.)  
         If this is an addressable type, we cannot pre-evaluate it.  
             CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  
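
   In outline, and heavily condensed (the SUBREG trick for promoted modes is
   reduced to a plain convert_modes call here; treat this as a sketch, not
   the exact implementation):

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* Only needed when outgoing arguments are accumulated in a
     preallocated block.  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      enum machine_mode mode;

      /* Only arguments that themselves contain calls can clobber
         already-stored parameters.  */
      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
        /* Convert to the promoted mode the register expects.  */
        args[i].value = convert_modes (args[i].mode, mode,
                                       args[i].value, args[i].unsignedp);
    }
}
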
static void precompute_register_parameters ( int  num_actuals,
struct arg_data args,
int *  reg_parm_seen 
)
static
   Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  
           If we are to promote the function arg to a wider mode,
           do it now.  
           If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  
           If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  
           If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  

References targetm.
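
   A condensed per-argument sketch, omitting the multi-word splitting and the
   loop-cost heuristic mentioned above (assumes the calls.c context and the
   arg_data fields; not the exact implementation):

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && !args[i].pass_on_stack)
      {
        (*reg_parm_seen)++;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */
        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);
      }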

rtx prepare_call_address ( tree  fndecl,
rtx  funexp,
rtx  static_chain_value,
rtx call_fusage,
int  reg_parm_seen,
int  sibcallp 
)
   Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  
     Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  
       If we are using registers for parameters, force the
       function address into a register now.  

References targetm.
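
   A simplified sketch of the body; the small-register-class refinement and
   the NO_FUNCTION_CSE conditional are folded into plain tests, so treat it
   as an approximation of the real code rather than the implementation:

  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = (reg_parm_seen
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (!sibcallp && optimize && !flag_no_function_cse)
    /* Copy a constant address through a pseudo so it can be CSEd.  */
    funexp = force_reg (Pmode, funexp);

  if (static_chain_value != 0)
    {
      rtx chain = targetm.calls.static_chain (fndecl, false);

      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        use_reg (call_fusage, chain);
    }

  return funexp;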

static void restore_fixed_argument_area ( rtx  ,
rtx  ,
int  ,
int   
)
static
static void restore_fixed_argument_area ( )
static
static rtx rtx_for_function_call ( tree  ,
tree   
)
static
static rtx rtx_for_function_call ( )
static
   Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  
     Get the function to call, in the form of RTL.  
         Get a SYMBOL_REF rtx for the function address.  
       Generate an rtx (probably a pseudo-register) for the address.  

References internal_arg_pointer_based_exp(), pc_rtx, and plus_constant().
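
   Apart from some TREE_USED bookkeeping, the body reduces to roughly the
   following sketch:

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    /* Get a SYMBOL_REF rtx for the function address.  */
    funexp = XEXP (DECL_RTL (fndecl), 0);
  else
    {
      /* Generate an rtx (probably a pseudo-register) for the address.  */
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();
    }
  return funexp;
}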

static rtx save_fixed_argument_area ( int  ,
rtx  ,
int *  ,
int *   
)
static
static rtx save_fixed_argument_area ( )
static
     The argument list is the property of the called routine, which
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  
     Compute the boundary of the area that needs to be saved, if any.  
           If we don't have the required alignment, must do this
           in BLKmode.  

References BLOCK_OP_CALL_PARM, emit_block_move(), emit_move_insn(), gen_rtx_MEM(), plus_constant(), set_mem_align(), and validize_mem().

int setjmp_call_p ( )
   Return nonzero when FNDECL represents a call to setjmp.  

Referenced by check_call().
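
   In current sources this is little more than a wrapper around
   special_function_p; a sketch:

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}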

bool shift_return_value ( )
   Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  
     Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  
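
   A rough sketch of the whole function; force_expand_binop and the
   ashl/ashr optabs are from optabs.h:

bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
  if (shift == 0)
    return false;

  /* Use ashr rather than lshr for right shifts (see above).  */
  if (!force_expand_binop (GET_MODE (value),
                           left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}
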
static int special_function_p ( const_tree  ,
int   
)
static
static int special_function_p ( )
static
   Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  
         Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  
         We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  
         Disregard prefix _, __, __x or __builtin_.  
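
   The prefix stripping referred to above looks roughly like the following
   fragment; only the returns-twice name matching is shown, and the exact
   list of recognized names varies between releases, so treat it as a sketch:

  const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  const char *tname = name;

  /* Disregard prefix _, __, __x or __builtin_.  */
  if (name[0] == '_')
    {
      if (name[1] == '_'
          && name[2] == 'b'
          && !strncmp (name + 3, "uiltin_", 7))
        tname += 10;
      else if (name[1] == '_' && name[2] == 'x')
        tname += 3;
      else if (name[1] == '_')
        tname += 2;
      else
        tname += 1;
    }

  /* After stripping the prefix, match the "magic" names.  */
  if (!strcmp (tname, "setjmp")
      || !strcmp (tname, "sigsetjmp")
      || !strcmp (tname, "savectx")
      || !strcmp (tname, "vfork")
      || !strcmp (tname, "getcontext"))
    flags |= ECF_RETURNS_TWICE;
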
static tree split_complex_types ( tree  )
static
static tree split_complex_types ( )
static
   Traverse a list of TYPES and expand all complex types into their
   components.  
     Before allocating memory, check for the common case of no complex.  
             Rewrite complex type with component type.  
             Add another component type for the imaginary part.  
             Skip the newly created node.  

References args_size::constant, count, and locate_and_pad_parm().
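
   A sketch close to the real function; targetm.calls.split_complex_arg
   decides whether a given COMPLEX_TYPE is split for the target:

static tree
split_complex_types (tree types)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_VALUE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return types;

 found:
  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (complex_type))
        {
          tree next, imag;

          /* Rewrite complex type with component type.  */
          TREE_VALUE (p) = TREE_TYPE (complex_type);
          next = TREE_CHAIN (p);

          /* Add another component type for the imaginary part.  */
          imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
          TREE_CHAIN (p) = imag;
          TREE_CHAIN (imag) = next;

          /* Skip the newly created node.  */
          p = TREE_CHAIN (p);
        }
    }

  return types;
}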

static int store_one_arg ( struct arg_data arg,
rtx  argblock,
int  flags,
int  variable_size,
int  reg_parm_stack_space 
)
static
   Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
   so must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is set to
   indicate that we need not worry about saving and restoring the stack.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  
     Push a new temporary level for any temporaries we make for
     this argument.  
         If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  
             stack_slot is negative, but we want to index stack_usage_map
             with positive values.  
             Don't worry about things in the fixed argument area;
             it has already been saved.  
                 We need to make a save area.  
     If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  
     Being passed entirely in a register.  We shouldn't be called in
     this case.  
     If this arg needs special alignment, don't load the registers
     here.  
     If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  
         stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  
         If the mode doesn't agree (because we are promoting the object,
         or for any other reason), convert the mode.  
     Check for overlap with already clobbered argument area.  
     Don't allow anything left on stack from computation
     of argument to alloca.  
       If the value is already in the stack slot, we are done.  
         Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  
         Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  
         Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  
         Compute the alignment of the pushed argument.  
         This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  
         Unless this is a partially-in-register argument, the argument is now
         in the stack.  
         BLKmode, at least partly to be pushed.  
         Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  
         Round its size up to a multiple
         of the allocation unit for arguments.  
             PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  
         When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  
             emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  
                 expand_call should ensure this.  
                     Use arg->locate.size.constant instead of size_rtx
                     because we only care about the part of the argument
                     on the stack.  
                     Even though they appear to be at the same location,
                     if part of the outgoing argument is in registers,
                     they aren't really at the same location.  Check for
                     this by making sure that the incoming size is the
                     same as the outgoing size.  
         Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  
     Mark all slots this store used.  
     Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  
     Free any temporary slots made in processing this argument.  
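
   The final "mark all slots this store used" step amounts to something like
   the fragment below, where LOWER_BOUND and UPPER_BOUND are local variables
   of store_one_arg delimiting the slot in STACK_USAGE_MAP (a sketch, not
   the exact code):

  /* Mark all slots this store used, so that a later argument's evaluation
     cannot silently clobber them.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && !variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;
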
static void store_unaligned_arguments_into_pseudos ( struct arg_data ,
int   
)
static
static void store_unaligned_arguments_into_pseudos ( )
static
   If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy their values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  
           Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  
               There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here, but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register, later passes know that the first AND to zero out
               the bitfield being set in the register is unnecessary.  The
               store of 0 will be deleted, as will at least the first AND.  

Variable Documentation

vec<rtx> cache
     Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with a fixed offset, or pc_rtx if the offset is variable or unknown.  

Referenced by preload_common_nodes().

int highest_outgoing_arg_in_use
static
   Size of STACK_USAGE_MAP.  

Referenced by emit_library_call_value_1().

struct { ... } internal_arg_pointer_exp_state
   Internal state for internal_arg_pointer_based_exp and its helpers.  
rtx scan_start
     Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  
int stack_arg_under_construction
static
   stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  
char* stack_usage_map
static
   A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  

Referenced by emit_library_call_value_1().

sbitmap stored_args_map
static
   A bitmap of virtual-incoming stack space.  A bit is set if the corresponding
   stack location's tail call argument has already been stored into the stack.
   This bitmap is used to prevent sibling call optimization if the function
   tries to use its parent's incoming argument slots when they have already
   been overwritten with tail call arguments.