GCC Middle and Back End API Reference
#include "statistics.h"
#include "machmode.h"
#include "input.h"
#include "real.h"
#include "vec.h"
#include "fixed-value.h"
#include "alias.h"
#include "hashtab.h"
#include "flags.h"
#include "rtl.def"
#include "reg-notes.def"
#include "insn-notes.def"
#include "genrtl.h"
Data Structures | |
struct | addr_diff_vec_flags |
struct | mem_attrs |
struct | reg_attrs |
union | rtunion_def |
struct | block_symbol |
struct | object_block |
struct | rtx_def |
union | rtx_def::u |
struct | rtvec_def |
struct | full_rtx_costs |
struct | address_info |
struct | replace_label_data |
struct | subreg_info |
struct | target_rtl |
struct | rtl_hooks |
Macros | |
#define | NOOP_MOVE_INSN_CODE INT_MAX |
#define | RTX_CODE enum rtx_code |
#define | DEF_RTL_EXPR(ENUM, NAME, FORMAT, CLASS) ENUM , |
#define | NUM_RTX_CODE ((int) LAST_AND_UNUSED_RTX_CODE) |
#define | RTX_OBJ_MASK (~1) |
#define | RTX_OBJ_RESULT (RTX_OBJ & RTX_OBJ_MASK) |
#define | RTX_COMPARE_MASK (~1) |
#define | RTX_COMPARE_RESULT (RTX_COMPARE & RTX_COMPARE_MASK) |
#define | RTX_ARITHMETIC_MASK (~1) |
#define | RTX_ARITHMETIC_RESULT (RTX_COMM_ARITH & RTX_ARITHMETIC_MASK) |
#define | RTX_BINARY_MASK (~3) |
#define | RTX_BINARY_RESULT (RTX_COMPARE & RTX_BINARY_MASK) |
#define | RTX_COMMUTATIVE_MASK (~2) |
#define | RTX_COMMUTATIVE_RESULT (RTX_COMM_COMPARE & RTX_COMMUTATIVE_MASK) |
#define | RTX_NON_COMMUTATIVE_RESULT (RTX_COMPARE & RTX_COMMUTATIVE_MASK) |
#define | GET_RTX_LENGTH(CODE) (rtx_length[(int) (CODE)]) |
#define | GET_RTX_NAME(CODE) (rtx_name[(int) (CODE)]) |
#define | GET_RTX_FORMAT(CODE) (rtx_format[(int) (CODE)]) |
#define | GET_RTX_CLASS(CODE) (rtx_class[(int) (CODE)]) |
#define | RTX_HDR_SIZE offsetof (struct rtx_def, u) |
#define | RTX_CODE_SIZE(CODE) rtx_code_size[CODE] |
#define | NULL_RTX (rtx) 0 |
#define | RTX_NEXT(X) |
#define | RTX_PREV(X) |
#define | GET_CODE(RTX) ((enum rtx_code) (RTX)->code) |
#define | PUT_CODE(RTX, CODE) ((RTX)->code = (CODE)) |
#define | GET_MODE(RTX) ((enum machine_mode) (RTX)->mode) |
#define | PUT_MODE(RTX, MODE) ((RTX)->mode = (MODE)) |
#define | NULL_RTVEC (rtvec) 0 |
#define | GET_NUM_ELEM(RTVEC) ((RTVEC)->num_elem) |
#define | PUT_NUM_ELEM(RTVEC, NUM) ((RTVEC)->num_elem = (NUM)) |
#define | REG_P(X) (GET_CODE (X) == REG) |
#define | MEM_P(X) (GET_CODE (X) == MEM) |
#define | CASE_CONST_SCALAR_INT |
#define | CASE_CONST_UNIQUE |
#define | CASE_CONST_ANY |
#define | CONST_INT_P(X) (GET_CODE (X) == CONST_INT) |
#define | CONST_FIXED_P(X) (GET_CODE (X) == CONST_FIXED) |
#define | CONST_DOUBLE_P(X) (GET_CODE (X) == CONST_DOUBLE) |
#define | CONST_DOUBLE_AS_INT_P(X) (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) == VOIDmode) |
#define | CONST_SCALAR_INT_P(X) (CONST_INT_P (X) || CONST_DOUBLE_AS_INT_P (X)) |
#define | CONST_DOUBLE_AS_FLOAT_P(X) (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) != VOIDmode) |
#define | LABEL_P(X) (GET_CODE (X) == CODE_LABEL) |
#define | JUMP_P(X) (GET_CODE (X) == JUMP_INSN) |
#define | CALL_P(X) (GET_CODE (X) == CALL_INSN) |
#define | NONJUMP_INSN_P(X) (GET_CODE (X) == INSN) |
#define | DEBUG_INSN_P(X) (GET_CODE (X) == DEBUG_INSN) |
#define | NONDEBUG_INSN_P(X) (INSN_P (X) && !DEBUG_INSN_P (X)) |
#define | MAY_HAVE_DEBUG_INSNS (flag_var_tracking_assignments) |
#define | INSN_P(X) (NONJUMP_INSN_P (X) || DEBUG_INSN_P (X) || JUMP_P (X) || CALL_P (X)) |
#define | NOTE_P(X) (GET_CODE (X) == NOTE) |
#define | BARRIER_P(X) (GET_CODE (X) == BARRIER) |
#define | JUMP_TABLE_DATA_P(INSN) (GET_CODE (INSN) == JUMP_TABLE_DATA) |
#define | ANY_RETURN_P(X) (GET_CODE (X) == RETURN || GET_CODE (X) == SIMPLE_RETURN) |
#define | UNARY_P(X) (GET_RTX_CLASS (GET_CODE (X)) == RTX_UNARY) |
#define | BINARY_P(X) ((GET_RTX_CLASS (GET_CODE (X)) & RTX_BINARY_MASK) == RTX_BINARY_RESULT) |
#define | ARITHMETIC_P(X) |
#define | COMMUTATIVE_ARITH_P(X) (GET_RTX_CLASS (GET_CODE (X)) == RTX_COMM_ARITH) |
#define | SWAPPABLE_OPERANDS_P(X) |
#define | NON_COMMUTATIVE_P(X) |
#define | COMMUTATIVE_P(X) |
#define | COMPARISON_P(X) ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMPARE_MASK) == RTX_COMPARE_RESULT) |
#define | CONSTANT_P(X) (GET_RTX_CLASS (GET_CODE (X)) == RTX_CONST_OBJ) |
#define | OBJECT_P(X) ((GET_RTX_CLASS (GET_CODE (X)) & RTX_OBJ_MASK) == RTX_OBJ_RESULT) |
#define | RTL_CHECK1(RTX, N, C1) ((RTX)->u.fld[N]) |
#define | RTL_CHECK2(RTX, N, C1, C2) ((RTX)->u.fld[N]) |
#define | RTL_CHECKC1(RTX, N, C) ((RTX)->u.fld[N]) |
#define | RTL_CHECKC2(RTX, N, C1, C2) ((RTX)->u.fld[N]) |
#define | RTVEC_ELT(RTVEC, I) ((RTVEC)->elem[I]) |
#define | XWINT(RTX, N) ((RTX)->u.hwint[N]) |
#define | XCWINT(RTX, N, C) ((RTX)->u.hwint[N]) |
#define | XCMWINT(RTX, N, C, M) ((RTX)->u.hwint[N]) |
#define | XCNMWINT(RTX, N, C, M) ((RTX)->u.hwint[N]) |
#define | XCNMPRV(RTX, C, M) (&(RTX)->u.rv) |
#define | XCNMPFV(RTX, C, M) (&(RTX)->u.fv) |
#define | BLOCK_SYMBOL_CHECK(RTX) (&(RTX)->u.block_sym) |
#define | RTX_FLAG(RTX, FLAG) ((RTX)->FLAG) |
#define | RTL_FLAG_CHECK1(NAME, RTX, C1) (RTX) |
#define | RTL_FLAG_CHECK2(NAME, RTX, C1, C2) (RTX) |
#define | RTL_FLAG_CHECK3(NAME, RTX, C1, C2, C3) (RTX) |
#define | RTL_FLAG_CHECK4(NAME, RTX, C1, C2, C3, C4) (RTX) |
#define | RTL_FLAG_CHECK5(NAME, RTX, C1, C2, C3, C4, C5) (RTX) |
#define | RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6) (RTX) |
#define | RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7) (RTX) |
#define | RTL_FLAG_CHECK8(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8) (RTX) |
#define | XINT(RTX, N) (RTL_CHECK2 (RTX, N, 'i', 'n').rt_int) |
#define | XUINT(RTX, N) (RTL_CHECK2 (RTX, N, 'i', 'n').rt_uint) |
#define | XSTR(RTX, N) (RTL_CHECK2 (RTX, N, 's', 'S').rt_str) |
#define | XEXP(RTX, N) (RTL_CHECK2 (RTX, N, 'e', 'u').rt_rtx) |
#define | XVEC(RTX, N) (RTL_CHECK2 (RTX, N, 'E', 'V').rt_rtvec) |
#define | XMODE(RTX, N) (RTL_CHECK1 (RTX, N, 'M').rt_type) |
#define | XTREE(RTX, N) (RTL_CHECK1 (RTX, N, 't').rt_tree) |
#define | XBBDEF(RTX, N) (RTL_CHECK1 (RTX, N, 'B').rt_bb) |
#define | XTMPL(RTX, N) (RTL_CHECK1 (RTX, N, 'T').rt_str) |
#define | XCFI(RTX, N) (RTL_CHECK1 (RTX, N, 'C').rt_cfi) |
#define | XVECEXP(RTX, N, M) RTVEC_ELT (XVEC (RTX, N), M) |
#define | XVECLEN(RTX, N) GET_NUM_ELEM (XVEC (RTX, N)) |
#define | X0INT(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_int) |
#define | X0UINT(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_uint) |
#define | X0STR(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_str) |
#define | X0EXP(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_rtx) |
#define | X0VEC(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_rtvec) |
#define | X0MODE(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_type) |
#define | X0TREE(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_tree) |
#define | X0BBDEF(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_bb) |
#define | X0ADVFLAGS(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_addr_diff_vec_flags) |
#define | X0CSELIB(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_cselib) |
#define | X0MEMATTR(RTX, N) (RTL_CHECKC1 (RTX, N, MEM).rt_mem) |
#define | X0REGATTR(RTX, N) (RTL_CHECKC1 (RTX, N, REG).rt_reg) |
#define | X0CONSTANT(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_constant) |
#define | X0ANY(RTX, N) RTL_CHECK1 (RTX, N, '0') |
#define | XCINT(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_int) |
#define | XCUINT(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_uint) |
#define | XCSTR(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_str) |
#define | XCEXP(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_rtx) |
#define | XCVEC(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_rtvec) |
#define | XCMODE(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_type) |
#define | XCTREE(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_tree) |
#define | XCBBDEF(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_bb) |
#define | XCCFI(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_cfi) |
#define | XCCSELIB(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_cselib) |
#define | XCVECEXP(RTX, N, M, C) RTVEC_ELT (XCVEC (RTX, N, C), M) |
#define | XCVECLEN(RTX, N, C) GET_NUM_ELEM (XCVEC (RTX, N, C)) |
#define | XC2EXP(RTX, N, C1, C2) (RTL_CHECKC2 (RTX, N, C1, C2).rt_rtx) |
#define | INSN_UID(INSN) XINT (INSN, 0) |
#define | PREV_INSN(INSN) XEXP (INSN, 1) |
#define | NEXT_INSN(INSN) XEXP (INSN, 2) |
#define | BLOCK_FOR_INSN(INSN) XBBDEF (INSN, 3) |
#define | PATTERN(INSN) XEXP (INSN, 4) |
#define | INSN_LOCATION(INSN) XUINT (INSN, 5) |
#define | INSN_HAS_LOCATION(INSN) |
#define | RTL_LOCATION(X) |
#define | INSN_CODE(INSN) XINT (INSN, 6) |
#define | RTX_FRAME_RELATED_P(RTX) |
#define | INSN_DELETED_P(RTX) |
#define | RTL_CONST_CALL_P(RTX) (RTL_FLAG_CHECK1 ("RTL_CONST_CALL_P", (RTX), CALL_INSN)->unchanging) |
#define | RTL_PURE_CALL_P(RTX) (RTL_FLAG_CHECK1 ("RTL_PURE_CALL_P", (RTX), CALL_INSN)->return_val) |
#define | RTL_CONST_OR_PURE_CALL_P(RTX) (RTL_CONST_CALL_P (RTX) || RTL_PURE_CALL_P (RTX)) |
#define | RTL_LOOPING_CONST_OR_PURE_CALL_P(RTX) (RTL_FLAG_CHECK1 ("CONST_OR_PURE_CALL_P", (RTX), CALL_INSN)->call) |
#define | SIBLING_CALL_P(RTX) (RTL_FLAG_CHECK1 ("SIBLING_CALL_P", (RTX), CALL_INSN)->jump) |
#define | INSN_ANNULLED_BRANCH_P(RTX) (RTL_FLAG_CHECK1 ("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN)->unchanging) |
#define | INSN_FROM_TARGET_P(RTX) |
#define | ADDR_DIFF_VEC_FLAGS(RTX) X0ADVFLAGS (RTX, 4) |
#define | CSELIB_VAL_PTR(RTX) X0CSELIB (RTX, 0) |
#define | REG_NOTES(INSN) XEXP(INSN, 7) |
#define | ENTRY_VALUE_EXP(RTX) (RTL_CHECKC1 (RTX, 0, ENTRY_VALUE).rt_rtx) |
#define | DEF_REG_NOTE(NAME) NAME, |
#define | REG_NOTE_KIND(LINK) ((enum reg_note) GET_MODE (LINK)) |
#define | PUT_REG_NOTE_KIND(LINK, KIND) PUT_MODE (LINK, (enum machine_mode) (KIND)) |
#define | GET_REG_NOTE_NAME(MODE) (reg_note_name[(int) (MODE)]) |
#define | CALL_INSN_FUNCTION_USAGE(INSN) XEXP(INSN, 8) |
#define | CODE_LABEL_NUMBER(INSN) XINT (INSN, 6) |
#define | NOTE_DATA(INSN) RTL_CHECKC1 (INSN, 4, NOTE) |
#define | NOTE_DELETED_LABEL_NAME(INSN) XCSTR (INSN, 4, NOTE) |
#define | SET_INSN_DELETED(INSN) set_insn_deleted (INSN); |
#define | NOTE_BLOCK(INSN) XCTREE (INSN, 4, NOTE) |
#define | NOTE_EH_HANDLER(INSN) XCINT (INSN, 4, NOTE) |
#define | NOTE_BASIC_BLOCK(INSN) XCBBDEF (INSN, 4, NOTE) |
#define | NOTE_VAR_LOCATION(INSN) XCEXP (INSN, 4, NOTE) |
#define | NOTE_CFI(INSN) XCCFI (INSN, 4, NOTE) |
#define | NOTE_LABEL_NUMBER(INSN) XCINT (INSN, 4, NOTE) |
#define | NOTE_KIND(INSN) XCINT (INSN, 5, NOTE) |
#define | NOTE_INSN_BASIC_BLOCK_P(INSN) (NOTE_P (INSN) && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK) |
#define | PAT_VAR_LOCATION_DECL(PAT) (XCTREE ((PAT), 0, VAR_LOCATION)) |
#define | PAT_VAR_LOCATION_LOC(PAT) (XCEXP ((PAT), 1, VAR_LOCATION)) |
#define | PAT_VAR_LOCATION_STATUS(PAT) ((enum var_init_status) (XCINT ((PAT), 2, VAR_LOCATION))) |
#define | NOTE_VAR_LOCATION_DECL(NOTE) PAT_VAR_LOCATION_DECL (NOTE_VAR_LOCATION (NOTE)) |
#define | NOTE_VAR_LOCATION_LOC(NOTE) PAT_VAR_LOCATION_LOC (NOTE_VAR_LOCATION (NOTE)) |
#define | NOTE_VAR_LOCATION_STATUS(NOTE) PAT_VAR_LOCATION_STATUS (NOTE_VAR_LOCATION (NOTE)) |
#define | INSN_VAR_LOCATION(INSN) PATTERN (INSN) |
#define | INSN_VAR_LOCATION_DECL(INSN) PAT_VAR_LOCATION_DECL (INSN_VAR_LOCATION (INSN)) |
#define | INSN_VAR_LOCATION_LOC(INSN) PAT_VAR_LOCATION_LOC (INSN_VAR_LOCATION (INSN)) |
#define | INSN_VAR_LOCATION_STATUS(INSN) PAT_VAR_LOCATION_STATUS (INSN_VAR_LOCATION (INSN)) |
#define | gen_rtx_UNKNOWN_VAR_LOC() (gen_rtx_CLOBBER (VOIDmode, const0_rtx)) |
#define | VAR_LOC_UNKNOWN_P(X) (GET_CODE (X) == CLOBBER && XEXP ((X), 0) == const0_rtx) |
#define | NOTE_DURING_CALL_P(RTX) (RTL_FLAG_CHECK1 ("NOTE_VAR_LOCATION_DURING_CALL_P", (RTX), NOTE)->call) |
#define | DEBUG_EXPR_TREE_DECL(RTX) XCTREE (RTX, 0, DEBUG_EXPR) |
#define | DEBUG_IMPLICIT_PTR_DECL(RTX) XCTREE (RTX, 0, DEBUG_IMPLICIT_PTR) |
#define | DEBUG_PARAMETER_REF_DECL(RTX) XCTREE (RTX, 0, DEBUG_PARAMETER_REF) |
#define | DEF_INSN_NOTE(NAME) NAME, |
#define | GET_NOTE_INSN_NAME(NOTE_CODE) (note_insn_name[(NOTE_CODE)]) |
#define | LABEL_NAME(RTX) XCSTR (RTX, 7, CODE_LABEL) |
#define | LABEL_NUSES(RTX) XCINT (RTX, 5, CODE_LABEL) |
#define | LABEL_KIND(LABEL) ((enum label_kind) (((LABEL)->jump << 1) | (LABEL)->call)) |
#define | SET_LABEL_KIND(LABEL, KIND) |
#define | LABEL_ALT_ENTRY_P(LABEL) (LABEL_KIND (LABEL) != LABEL_NORMAL) |
#define | JUMP_LABEL(INSN) XCEXP (INSN, 8, JUMP_INSN) |
#define | LABEL_REFS(LABEL) XCEXP (LABEL, 4, CODE_LABEL) |
#define | REGNO(RTX) (rhs_regno(RTX)) |
#define | SET_REGNO(RTX, N) (df_ref_change_reg_with_loc (REGNO (RTX), N, RTX), XCUINT (RTX, 0, REG) = N) |
#define | SET_REGNO_RAW(RTX, N) (XCUINT (RTX, 0, REG) = N) |
#define | ORIGINAL_REGNO(RTX) X0UINT (RTX, 1) |
#define | REG_FUNCTION_VALUE_P(RTX) (RTL_FLAG_CHECK2 ("REG_FUNCTION_VALUE_P", (RTX), REG, PARALLEL)->return_val) |
#define | REG_USERVAR_P(RTX) (RTL_FLAG_CHECK1 ("REG_USERVAR_P", (RTX), REG)->volatil) |
#define | REG_POINTER(RTX) (RTL_FLAG_CHECK1 ("REG_POINTER", (RTX), REG)->frame_related) |
#define | MEM_POINTER(RTX) (RTL_FLAG_CHECK1 ("MEM_POINTER", (RTX), MEM)->frame_related) |
#define | HARD_REGISTER_P(REG) (HARD_REGISTER_NUM_P (REGNO (REG))) |
#define | HARD_REGISTER_NUM_P(REG_NO) ((REG_NO) < FIRST_PSEUDO_REGISTER) |
#define | INTVAL(RTX) XCWINT (RTX, 0, CONST_INT) |
#define | UINTVAL(RTX) ((unsigned HOST_WIDE_INT) INTVAL (RTX)) |
#define | CONST_DOUBLE_LOW(r) XCMWINT (r, 0, CONST_DOUBLE, VOIDmode) |
#define | CONST_DOUBLE_HIGH(r) XCMWINT (r, 1, CONST_DOUBLE, VOIDmode) |
#define | CONST_DOUBLE_REAL_VALUE(r) ((const struct real_value *) XCNMPRV (r, CONST_DOUBLE, VOIDmode)) |
#define | CONST_FIXED_VALUE(r) ((const struct fixed_value *) XCNMPFV (r, CONST_FIXED, VOIDmode)) |
#define | CONST_FIXED_VALUE_HIGH(r) ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.high)) |
#define | CONST_FIXED_VALUE_LOW(r) ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.low)) |
#define | CONST_VECTOR_ELT(RTX, N) XCVECEXP (RTX, 0, N, CONST_VECTOR) |
#define | CONST_VECTOR_NUNITS(RTX) XCVECLEN (RTX, 0, CONST_VECTOR) |
#define | SUBREG_REG(RTX) XCEXP (RTX, 0, SUBREG) |
#define | SUBREG_BYTE(RTX) XCUINT (RTX, 1, SUBREG) |
#define | COSTS_N_INSNS(N) ((N) * 4) |
#define | MAX_COST INT_MAX |
#define | SUBREG_PROMOTED_VAR_P(RTX) (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED", (RTX), SUBREG)->in_struct) |
#define | SUBREG_PROMOTED_UNSIGNED_SET(RTX, VAL) |
#define | SUBREG_PROMOTED_UNSIGNED_P(RTX) |
#define | LRA_SUBREG_P(RTX) (RTL_FLAG_CHECK1 ("LRA_SUBREG_P", (RTX), SUBREG)->jump) |
#define | ASM_OPERANDS_TEMPLATE(RTX) XCSTR (RTX, 0, ASM_OPERANDS) |
#define | ASM_OPERANDS_OUTPUT_CONSTRAINT(RTX) XCSTR (RTX, 1, ASM_OPERANDS) |
#define | ASM_OPERANDS_OUTPUT_IDX(RTX) XCINT (RTX, 2, ASM_OPERANDS) |
#define | ASM_OPERANDS_INPUT_VEC(RTX) XCVEC (RTX, 3, ASM_OPERANDS) |
#define | ASM_OPERANDS_INPUT_CONSTRAINT_VEC(RTX) XCVEC (RTX, 4, ASM_OPERANDS) |
#define | ASM_OPERANDS_INPUT(RTX, N) XCVECEXP (RTX, 3, N, ASM_OPERANDS) |
#define | ASM_OPERANDS_INPUT_LENGTH(RTX) XCVECLEN (RTX, 3, ASM_OPERANDS) |
#define | ASM_OPERANDS_INPUT_CONSTRAINT_EXP(RTX, N) XCVECEXP (RTX, 4, N, ASM_OPERANDS) |
#define | ASM_OPERANDS_INPUT_CONSTRAINT(RTX, N) XSTR (XCVECEXP (RTX, 4, N, ASM_OPERANDS), 0) |
#define | ASM_OPERANDS_INPUT_MODE(RTX, N) GET_MODE (XCVECEXP (RTX, 4, N, ASM_OPERANDS)) |
#define | ASM_OPERANDS_LABEL_VEC(RTX) XCVEC (RTX, 5, ASM_OPERANDS) |
#define | ASM_OPERANDS_LABEL_LENGTH(RTX) XCVECLEN (RTX, 5, ASM_OPERANDS) |
#define | ASM_OPERANDS_LABEL(RTX, N) XCVECEXP (RTX, 5, N, ASM_OPERANDS) |
#define | ASM_OPERANDS_SOURCE_LOCATION(RTX) XCUINT (RTX, 6, ASM_OPERANDS) |
#define | ASM_INPUT_SOURCE_LOCATION(RTX) XCUINT (RTX, 1, ASM_INPUT) |
#define | MEM_READONLY_P(RTX) (RTL_FLAG_CHECK1 ("MEM_READONLY_P", (RTX), MEM)->unchanging) |
#define | MEM_KEEP_ALIAS_SET_P(RTX) (RTL_FLAG_CHECK1 ("MEM_KEEP_ALIAS_SET_P", (RTX), MEM)->jump) |
#define | MEM_VOLATILE_P(RTX) |
#define | MEM_NOTRAP_P(RTX) (RTL_FLAG_CHECK1 ("MEM_NOTRAP_P", (RTX), MEM)->call) |
#define | MEM_ATTRS(RTX) X0MEMATTR (RTX, 1) |
#define | REG_ATTRS(RTX) X0REGATTR (RTX, 2) |
#define | MEM_ALIAS_SET(RTX) (get_mem_attrs (RTX)->alias) |
#define | MEM_EXPR(RTX) (get_mem_attrs (RTX)->expr) |
#define | MEM_OFFSET_KNOWN_P(RTX) (get_mem_attrs (RTX)->offset_known_p) |
#define | MEM_OFFSET(RTX) (get_mem_attrs (RTX)->offset) |
#define | MEM_ADDR_SPACE(RTX) (get_mem_attrs (RTX)->addrspace) |
#define | MEM_SIZE_KNOWN_P(RTX) (get_mem_attrs (RTX)->size_known_p) |
#define | MEM_SIZE(RTX) (get_mem_attrs (RTX)->size) |
#define | MEM_ALIGN(RTX) (get_mem_attrs (RTX)->align) |
#define | REG_EXPR(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->decl) |
#define | REG_OFFSET(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->offset) |
#define | MEM_COPY_ATTRIBUTES(LHS, RHS) |
#define | LABEL_REF_NONLOCAL_P(RTX) (RTL_FLAG_CHECK1 ("LABEL_REF_NONLOCAL_P", (RTX), LABEL_REF)->volatil) |
#define | LABEL_PRESERVE_P(RTX) (RTL_FLAG_CHECK2 ("LABEL_PRESERVE_P", (RTX), CODE_LABEL, NOTE)->in_struct) |
#define | SCHED_GROUP_P(RTX) |
#define | SET_DEST(RTX) XC2EXP (RTX, 0, SET, CLOBBER) |
#define | SET_SRC(RTX) XCEXP (RTX, 1, SET) |
#define | SET_IS_RETURN_P(RTX) (RTL_FLAG_CHECK1 ("SET_IS_RETURN_P", (RTX), SET)->jump) |
#define | TRAP_CONDITION(RTX) XCEXP (RTX, 0, TRAP_IF) |
#define | TRAP_CODE(RTX) XCEXP (RTX, 1, TRAP_IF) |
#define | COND_EXEC_TEST(RTX) XCEXP (RTX, 0, COND_EXEC) |
#define | COND_EXEC_CODE(RTX) XCEXP (RTX, 1, COND_EXEC) |
#define | CONSTANT_POOL_ADDRESS_P(RTX) (RTL_FLAG_CHECK1 ("CONSTANT_POOL_ADDRESS_P", (RTX), SYMBOL_REF)->unchanging) |
#define | TREE_CONSTANT_POOL_ADDRESS_P(RTX) |
#define | SYMBOL_REF_FLAG(RTX) (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAG", (RTX), SYMBOL_REF)->volatil) |
#define | SYMBOL_REF_USED(RTX) (RTL_FLAG_CHECK1 ("SYMBOL_REF_USED", (RTX), SYMBOL_REF)->used) |
#define | SYMBOL_REF_WEAK(RTX) (RTL_FLAG_CHECK1 ("SYMBOL_REF_WEAK", (RTX), SYMBOL_REF)->return_val) |
#define | SYMBOL_REF_DATA(RTX) X0ANY ((RTX), 2) |
#define | SET_SYMBOL_REF_DECL(RTX, DECL) (gcc_assert (!CONSTANT_POOL_ADDRESS_P (RTX)), X0TREE ((RTX), 2) = (DECL)) |
#define | SYMBOL_REF_DECL(RTX) (CONSTANT_POOL_ADDRESS_P (RTX) ? NULL : X0TREE ((RTX), 2)) |
#define | SET_SYMBOL_REF_CONSTANT(RTX, C) (gcc_assert (CONSTANT_POOL_ADDRESS_P (RTX)), X0CONSTANT ((RTX), 2) = (C)) |
#define | SYMBOL_REF_CONSTANT(RTX) (CONSTANT_POOL_ADDRESS_P (RTX) ? X0CONSTANT ((RTX), 2) : NULL) |
#define | SYMBOL_REF_FLAGS(RTX) X0INT ((RTX), 1) |
#define | SYMBOL_FLAG_FUNCTION (1 << 0) |
#define | SYMBOL_REF_FUNCTION_P(RTX) ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_FUNCTION) != 0) |
#define | SYMBOL_FLAG_LOCAL (1 << 1) |
#define | SYMBOL_REF_LOCAL_P(RTX) ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_LOCAL) != 0) |
#define | SYMBOL_FLAG_SMALL (1 << 2) |
#define | SYMBOL_REF_SMALL_P(RTX) ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_SMALL) != 0) |
#define | SYMBOL_FLAG_TLS_SHIFT 3 |
#define | SYMBOL_REF_TLS_MODEL(RTX) ((enum tls_model) ((SYMBOL_REF_FLAGS (RTX) >> SYMBOL_FLAG_TLS_SHIFT) & 7)) |
#define | SYMBOL_FLAG_EXTERNAL (1 << 6) |
#define | SYMBOL_REF_EXTERNAL_P(RTX) ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_EXTERNAL) != 0) |
#define | SYMBOL_FLAG_HAS_BLOCK_INFO (1 << 7) |
#define | SYMBOL_REF_HAS_BLOCK_INFO_P(RTX) ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_HAS_BLOCK_INFO) != 0) |
#define | SYMBOL_FLAG_ANCHOR (1 << 8) |
#define | SYMBOL_REF_ANCHOR_P(RTX) ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_ANCHOR) != 0) |
#define | SYMBOL_FLAG_MACH_DEP_SHIFT 9 |
#define | SYMBOL_FLAG_MACH_DEP (1 << SYMBOL_FLAG_MACH_DEP_SHIFT) |
#define | SYMBOL_REF_BLOCK(RTX) (BLOCK_SYMBOL_CHECK (RTX)->block) |
#define | SYMBOL_REF_BLOCK_OFFSET(RTX) (BLOCK_SYMBOL_CHECK (RTX)->offset) |
#define | PREFETCH_SCHEDULE_BARRIER_P(RTX) (RTL_FLAG_CHECK1 ("PREFETCH_SCHEDULE_BARRIER_P", (RTX), PREFETCH)->volatil) |
#define | FIND_REG_INC_NOTE(INSN, REG) 0 |
#define | HAVE_PRE_INCREMENT 0 |
#define | HAVE_PRE_DECREMENT 0 |
#define | HAVE_POST_INCREMENT 0 |
#define | HAVE_POST_DECREMENT 0 |
#define | HAVE_POST_MODIFY_DISP 0 |
#define | HAVE_POST_MODIFY_REG 0 |
#define | HAVE_PRE_MODIFY_DISP 0 |
#define | HAVE_PRE_MODIFY_REG 0 |
#define | USE_LOAD_POST_INCREMENT(MODE) HAVE_POST_INCREMENT |
#define | USE_LOAD_POST_DECREMENT(MODE) HAVE_POST_DECREMENT |
#define | USE_LOAD_PRE_INCREMENT(MODE) HAVE_PRE_INCREMENT |
#define | USE_LOAD_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT |
#define | USE_STORE_POST_INCREMENT(MODE) HAVE_POST_INCREMENT |
#define | USE_STORE_POST_DECREMENT(MODE) HAVE_POST_DECREMENT |
#define | USE_STORE_PRE_INCREMENT(MODE) HAVE_PRE_INCREMENT |
#define | USE_STORE_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT |
#define | rtx_alloc(c) rtx_alloc_stat (c MEM_STAT_INFO) |
#define | shallow_copy_rtx(a) shallow_copy_rtx_stat (a MEM_STAT_INFO) |
#define | convert_memory_address(to_mode, x) convert_memory_address_addr_space ((to_mode), (x), ADDR_SPACE_GENERIC) |
#define | ASLK_REDUCE_ALIGN 1 |
#define | ASLK_RECORD_PAD 2 |
#define | single_set(I) |
#define | single_set_1(I) single_set_2 (I, PATTERN (I)) |
#define | MAX_SAVED_CONST_INT 64 |
#define | const0_rtx (const_int_rtx[MAX_SAVED_CONST_INT]) |
#define | const1_rtx (const_int_rtx[MAX_SAVED_CONST_INT+1]) |
#define | const2_rtx (const_int_rtx[MAX_SAVED_CONST_INT+2]) |
#define | constm1_rtx (const_int_rtx[MAX_SAVED_CONST_INT-1]) |
#define | CONST0_RTX(MODE) (const_tiny_rtx[0][(int) (MODE)]) |
#define | CONST1_RTX(MODE) (const_tiny_rtx[1][(int) (MODE)]) |
#define | CONST2_RTX(MODE) (const_tiny_rtx[2][(int) (MODE)]) |
#define | CONSTM1_RTX(MODE) (const_tiny_rtx[3][(int) (MODE)]) |
#define | HARD_FRAME_POINTER_REGNUM FRAME_POINTER_REGNUM |
#define | HARD_FRAME_POINTER_IS_FRAME_POINTER (HARD_FRAME_POINTER_REGNUM == FRAME_POINTER_REGNUM) |
#define | HARD_FRAME_POINTER_IS_ARG_POINTER (HARD_FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM) |
#define | this_target_rtl (&default_target_rtl) |
#define | global_rtl (this_target_rtl->x_global_rtl) |
#define | pic_offset_table_rtx (this_target_rtl->x_pic_offset_table_rtx) |
#define | return_address_pointer_rtx (this_target_rtl->x_return_address_pointer_rtx) |
#define | top_of_stack (this_target_rtl->x_top_of_stack) |
#define | mode_mem_attrs (this_target_rtl->x_mode_mem_attrs) |
#define | stack_pointer_rtx (global_rtl[GR_STACK_POINTER]) |
#define | frame_pointer_rtx (global_rtl[GR_FRAME_POINTER]) |
#define | hard_frame_pointer_rtx (global_rtl[GR_HARD_FRAME_POINTER]) |
#define | arg_pointer_rtx (global_rtl[GR_ARG_POINTER]) |
#define | gen_rtx_ASM_INPUT(MODE, ARG0) gen_rtx_fmt_si (ASM_INPUT, (MODE), (ARG0), 0) |
#define | gen_rtx_ASM_INPUT_loc(MODE, ARG0, LOC) gen_rtx_fmt_si (ASM_INPUT, (MODE), (ARG0), (LOC)) |
#define | GEN_INT(N) gen_rtx_CONST_INT (VOIDmode, (N)) |
#define | FIRST_VIRTUAL_REGISTER (FIRST_PSEUDO_REGISTER) |
#define | virtual_incoming_args_rtx (global_rtl[GR_VIRTUAL_INCOMING_ARGS]) |
#define | VIRTUAL_INCOMING_ARGS_REGNUM (FIRST_VIRTUAL_REGISTER) |
#define | virtual_stack_vars_rtx (global_rtl[GR_VIRTUAL_STACK_ARGS]) |
#define | VIRTUAL_STACK_VARS_REGNUM ((FIRST_VIRTUAL_REGISTER) + 1) |
#define | virtual_stack_dynamic_rtx (global_rtl[GR_VIRTUAL_STACK_DYNAMIC]) |
#define | VIRTUAL_STACK_DYNAMIC_REGNUM ((FIRST_VIRTUAL_REGISTER) + 2) |
#define | virtual_outgoing_args_rtx (global_rtl[GR_VIRTUAL_OUTGOING_ARGS]) |
#define | VIRTUAL_OUTGOING_ARGS_REGNUM ((FIRST_VIRTUAL_REGISTER) + 3) |
#define | virtual_cfa_rtx (global_rtl[GR_VIRTUAL_CFA]) |
#define | VIRTUAL_CFA_REGNUM ((FIRST_VIRTUAL_REGISTER) + 4) |
#define | LAST_VIRTUAL_POINTER_REGISTER ((FIRST_VIRTUAL_REGISTER) + 4) |
#define | virtual_preferred_stack_boundary_rtx (global_rtl[GR_VIRTUAL_PREFERRED_STACK_BOUNDARY]) |
#define | VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM ((FIRST_VIRTUAL_REGISTER) + 5) |
#define | LAST_VIRTUAL_REGISTER ((FIRST_VIRTUAL_REGISTER) + 5) |
#define | REGNO_PTR_FRAME_P(REGNUM) |
#define | INVALID_REGNUM (~(unsigned int) 0) |
#define | IGNORED_DWARF_REGNUM (INVALID_REGNUM - 1) |
#define | can_create_pseudo_p() (!reload_in_progress && !reload_completed) |
#define | gen_lowpart rtl_hooks.gen_lowpart |
#define | fatal_insn(msgid, insn) _fatal_insn (msgid, insn, __FILE__, __LINE__, __FUNCTION__) |
#define | fatal_insn_not_found(insn) _fatal_insn_not_found (insn, __FILE__, __LINE__, __FUNCTION__) |
Typedefs | |
typedef struct mem_attrs | mem_attrs |
typedef struct reg_attrs | reg_attrs |
typedef union rtunion_def | rtunion |
typedef struct replace_label_data | replace_label_data |
typedef int(* | rtx_function )(rtx *, void *) |
typedef int(* | for_each_inc_dec_fn )(rtx mem, rtx op, rtx dest, rtx src, rtx srcoff, void *arg) |
typedef int(* | rtx_equal_p_callback_function )(const_rtx *, const_rtx *, rtx *, rtx *) |
typedef int(* | hash_rtx_callback_function )(const_rtx, enum machine_mode, rtx *, enum machine_mode *) |
Enumerations | |
enum | rtx_code { DEF_RTL_EXPR, DEF_RTL_EXPR } |
enum | rtx_class { RTX_COMPARE, RTX_COMM_COMPARE, RTX_BIN_ARITH, RTX_COMM_ARITH, RTX_UNARY, RTX_EXTRA, RTX_MATCH, RTX_INSN, RTX_OBJ, RTX_CONST_OBJ, RTX_TERNARY, RTX_BITFIELD_OPS, RTX_AUTOINC } |
enum | reg_note { REG_NOTE, REG_NOTE } |
enum | insn_note { INSN_NOTE, INSN_NOTE } |
enum | label_kind { LABEL_NORMAL = 0, LABEL_STATIC_ENTRY, LABEL_GLOBAL_ENTRY, LABEL_WEAK_ENTRY } |
enum | global_rtl_index { GR_STACK_POINTER, GR_FRAME_POINTER, GR_ARG_POINTER = GR_FRAME_POINTER, GR_HARD_FRAME_POINTER = GR_FRAME_POINTER, GR_VIRTUAL_INCOMING_ARGS, GR_VIRTUAL_STACK_ARGS, GR_VIRTUAL_STACK_DYNAMIC, GR_VIRTUAL_OUTGOING_ARGS, GR_VIRTUAL_CFA, GR_VIRTUAL_PREFERRED_STACK_BOUNDARY, GR_MAX } |
enum | libcall_type { LCT_NORMAL = 0, LCT_CONST = 1, LCT_PURE = 2, LCT_NORETURN = 3, LCT_THROW = 4, LCT_RETURNS_TWICE = 5 } |
Variables | |
const unsigned char | rtx_length [NUM_RTX_CODE] |
const char *const | rtx_name [NUM_RTX_CODE] |
const char *const | rtx_format [NUM_RTX_CODE] |
enum rtx_class | rtx_class [NUM_RTX_CODE] |
const unsigned char | rtx_code_size [NUM_RTX_CODE] |
const unsigned char | rtx_next [NUM_RTX_CODE] |
const char *const | reg_note_name [] |
const char *const | note_insn_name [NOTE_INSN_MAX] |
int | generating_concat_p |
int | currently_expanding_to_rtl |
location_t | prologue_location |
location_t | epilogue_location |
int | split_branch_probability |
rtx | const_int_rtx [MAX_SAVED_CONST_INT *2+1] |
rtx | const_true_rtx |
rtx | const_tiny_rtx [4][(int) MAX_MACHINE_MODE] |
rtx | pc_rtx |
rtx | cc0_rtx |
rtx | ret_rtx |
rtx | simple_return_rtx |
struct target_rtl | default_target_rtl |
int | reload_completed |
int | epilogue_completed |
int | reload_in_progress |
int | lra_in_progress |
int | cse_not_expected |
const char * | print_rtx_head |
rtx | stack_limit_rtx |
struct rtl_hooks | rtl_hooks |
struct rtl_hooks | general_rtl_hooks |
#define ADDR_DIFF_VEC_FLAGS | ( | RTX | ) | X0ADVFLAGS (RTX, 4) |
In an ADDR_DIFF_VEC, the flags for RTX for use by branch shortening. See the comments for ADDR_DIFF_VEC in rtl.def.
#define ANY_RETURN_P | ( | X | ) | (GET_CODE (X) == RETURN || GET_CODE (X) == SIMPLE_RETURN) |
Predicate yielding nonzero iff X is a return or simple_return.
Referenced by replace_rtx(), and return_insn_p().
#define arg_pointer_rtx (global_rtl[GR_ARG_POINTER]) |
#define ARITHMETIC_P | ( | X | ) |
1 if X is an arithmetic operator.
#define ASLK_RECORD_PAD 2 |
#define ASLK_REDUCE_ALIGN 1 |
#define ASM_INPUT_SOURCE_LOCATION | ( | RTX | ) | XCUINT (RTX, 1, ASM_INPUT) |
Referenced by find_comparison_args().
#define ASM_OPERANDS_INPUT_CONSTRAINT_VEC | ( | RTX | ) | XCVEC (RTX, 4, ASM_OPERANDS) |
#define ASM_OPERANDS_INPUT_LENGTH | ( | RTX | ) | XCVECLEN (RTX, 3, ASM_OPERANDS) |
Referenced by find_comparison_args().
#define ASM_OPERANDS_INPUT_VEC | ( | RTX | ) | XCVEC (RTX, 3, ASM_OPERANDS) |
Referenced by ordered_comparison_operator().
#define ASM_OPERANDS_LABEL | ( | RTX, N | ) | XCVECEXP (RTX, 5, N, ASM_OPERANDS) |
Referenced by emit_barrier_after_bb().
#define ASM_OPERANDS_LABEL_LENGTH | ( | RTX | ) | XCVECLEN (RTX, 5, ASM_OPERANDS) |
#define ASM_OPERANDS_LABEL_VEC | ( | RTX | ) | XCVEC (RTX, 5, ASM_OPERANDS) |
#define ASM_OPERANDS_OUTPUT_CONSTRAINT | ( | RTX | ) | XCSTR (RTX, 1, ASM_OPERANDS) |
Referenced by extract_asm_operands().
#define ASM_OPERANDS_OUTPUT_IDX | ( | RTX | ) | XCINT (RTX, 2, ASM_OPERANDS) |
#define ASM_OPERANDS_SOURCE_LOCATION | ( | RTX | ) | XCUINT (RTX, 6, ASM_OPERANDS) |
Referenced by diagnostic_for_asm().
#define ASM_OPERANDS_TEMPLATE | ( | RTX | ) | XCSTR (RTX, 0, ASM_OPERANDS) |
Access various components of an ASM_OPERANDS rtx.
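A minimal sketch of how these accessors are typically combined; the helper name scan_asm_inputs and its body are illustrative assumptions, not code from GCC:
    /* Walk the inputs of an ASM_OPERANDS rtx BODY.  The caller is assumed
       to have verified GET_CODE (body) == ASM_OPERANDS already.  */
    static void
    scan_asm_inputs (rtx body)
    {
      int i;
      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (body); i++)
        {
          rtx input = ASM_OPERANDS_INPUT (body, i);
          const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (body, i);
          /* ... examine INPUT against CONSTRAINT here ...  */
        }
    }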
#define BARRIER_P | ( | X | ) | (GET_CODE (X) == BARRIER) |
Predicate yielding nonzero iff X is a barrier insn.
Referenced by alter_reg(), block_label(), create_cfi_notes(), make_pass_free_cfg(), merge_blocks_move_predecessor_nojumps(), rebuild_jump_labels_chain(), and rtl_verify_bb_pointers().
#define BINARY_P | ( | X | ) | ((GET_RTX_CLASS (GET_CODE (X)) & RTX_BINARY_MASK) == RTX_BINARY_RESULT) |
1 if X is a binary operator.
#define BLOCK_FOR_INSN | ( | INSN | ) | XBBDEF (INSN, 3) |
Referenced by alloc_cprop_mem(), check_dependency(), delete_insn(), df_dump_insn_bottom(), emit_call_insn_after_setloc(), find_moveable_store(), free_gcse_mem(), init_resource_info(), inner_loop_header_p(), insert_insn_end_basic_block(), insert_store(), ira_reassign_pseudos(), mark_label_nuses(), may_assign_reg_p(), move2add_use_add3_insn(), move2add_valid_value_p(), one_pre_gcse_pass(), pre_edge_insert(), process_reg_shuffles(), reload_combine_closest_single_use(), remove_predictions_associated_with_edge(), rtl_delete_block(), saved_hard_reg_compare_func(), store_killed_before(), and try_replace_reg().
#define BLOCK_SYMBOL_CHECK | ( | RTX | ) | (&(RTX)->u.block_sym) |
#define CALL_INSN_FUNCTION_USAGE | ( | INSN | ) | XEXP(INSN, 8) |
This field is only present on CALL_INSNs. It holds a chain of EXPR_LIST of USE and CLOBBER expressions. USE expressions list the registers filled with arguments that are passed to the function. CLOBBER expressions document the registers explicitly clobbered by this CALL_INSN. Pseudo registers can not be mentioned in this list.
Referenced by check_argument_store(), find_reg_note(), gen_const_vector(), make_note_raw(), merge_dir(), remove_pseudos(), and reverse_op().
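A sketch of the usual way this chain is scanned; call_insn is an assumed variable holding a CALL_INSN:
    /* Each link is an EXPR_LIST; XEXP (link, 0) is a USE or CLOBBER and
       XEXP (link, 1) is the next link.  */
    rtx link;
    for (link = CALL_INSN_FUNCTION_USAGE (call_insn); link; link = XEXP (link, 1))
      {
        rtx op = XEXP (link, 0);
        if (GET_CODE (op) == USE && REG_P (XEXP (op, 0)))
          {
            /* XEXP (op, 0) is a hard register carrying an argument.  */
          }
      }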
#define CALL_P | ( | X | ) | (GET_CODE (X) == CALL_INSN) |
Predicate yielding nonzero iff X is a call insn.
Referenced by can_throw_external(), cheap_bb_rtx_cost_p(), check_argument_store(), default_fixed_point_supported_p(), df_get_exit_block_use_set(), do_warn_unused_parameter(), emit_move_insn(), expand_builtin_longjmp(), expand_copysign_bit(), find_reg_note(), force_move_args_size_note(), fprint_whex(), get_eh_region_from_rtx(), get_last_insertion_point(), get_last_value_validate(), make_note_raw(), merge_identical_invariants(), next_active_insn(), process_bb_node_lives(), record_entry_value(), remove_pseudos(), remove_unreachable_eh_regions(), saved_hard_reg_compare_func(), sjlj_assign_call_site_values(), and store_killed_in_pat().
#define can_create_pseudo_p | ( | ) | (!reload_in_progress && !reload_completed) |
This macro indicates whether you may create a new pseudo-register.
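A sketch of the common guard; scratch_reg stands in for whatever existing register a late pass would reuse and is an assumption, not a GCC name:
    /* Allocate a fresh pseudo only while that is still allowed; once
       reload has started, fall back to an existing register.  */
    rtx tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : scratch_reg;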
#define CASE_CONST_ANY |
Match all CONST_* rtxes.
Referenced by equiv_init_varies_p(), find_comparison_args(), gen_clobber(), get_elimination(), invariant_for_use(), remove_note(), rtx_unstable_p(), rtx_varies_p(), set_label_offsets(), shared_const_p(), target_canonicalize_comparison(), and volatile_refs_p().
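These macros expand to a run of case labels, so they are used directly inside a switch on GET_CODE; a sketch, with x assumed to be the rtx under inspection:
    switch (GET_CODE (x))
      {
      CASE_CONST_ANY:
        /* X is some flavour of constant (CONST_INT, CONST_DOUBLE, ...).  */
        return true;
      case REG:
      case MEM:
        return false;
      default:
        break;
      }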
#define CASE_CONST_SCALAR_INT |
Match CONST_*s that can represent compile-time constant integers.
Referenced by convert_memory_address_addr_space().
#define CASE_CONST_UNIQUE |
Match CONST_*s for which pointer equality corresponds to value equality.
Referenced by hard_reg_set_here_p(), invert_jump(), and rtx_equal_p_cb().
#define CODE_LABEL_NUMBER | ( | INSN | ) | XINT (INSN, 6) |
The label-number of a code-label. The assembler label is made from `L' and the label-number printed in decimal. Label numbers are unique in a compilation.
Referenced by alter_reg(), default_jump_align_max_skip(), default_loop_align_max_skip(), and premark_types_used_by_global_vars_helper().
#define COMMUTATIVE_ARITH_P | ( | X | ) | (GET_RTX_CLASS (GET_CODE (X)) == RTX_COMM_ARITH) |
1 if X is a commutative arithmetic operator.
#define COMMUTATIVE_P | ( | X | ) |
1 if X is a commutative operator on integers.
Referenced by default_hidden_stack_protect_fail(), and validate_change().
#define COMPARISON_P | ( | X | ) | ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMPARE_MASK) == RTX_COMPARE_RESULT) |
1 if X is a relational operator.
Referenced by canon_condition(), and simplify_set().
#define COND_EXEC_CODE | ( | RTX | ) | XCEXP (RTX, 1, COND_EXEC) |
Referenced by adjust_mem_uses(), multiple_sets(), and reg_overlap_mentioned_p().
#define COND_EXEC_TEST | ( | RTX | ) | XCEXP (RTX, 0, COND_EXEC) |
For a COND_EXEC rtx, COND_EXEC_TEST is the condition to base conditionally executing the code on, COND_EXEC_CODE is the code to execute if the condition is true.
#define const0_rtx (const_int_rtx[MAX_SAVED_CONST_INT]) |
Referenced by analyze_insn_to_expand_var(), can_store_by_pieces(), canon_condition(), cfg_layout_can_merge_blocks_p(), do_compare_rtx_and_jump(), do_jump_by_parts_zero_rtx(), emit_cmp_and_jump_insn_1(), end_ifcvt_sequence(), expand_atomic_compare_and_swap(), expand_builtin_longjmp(), expand_builtin_mempcpy_args(), expand_builtin_memset_args(), expand_builtin_strcpy_args(), expand_builtin_strncmp(), expand_call(), expand_copysign(), expand_gimple_stmt(), expand_mem_signal_fence(), expand_mult_highpart_adjust(), expand_sync_lock_test_and_set(), final_start_function(), find_shift_sequence(), get_frame_arg(), get_memmodel(), have_sub2_insn(), highest_pow2_factor_for_target(), maybe_emit_unop_insn(), move2add_use_add3_insn(), move2add_valid_value_p(), noce_try_addcc(), operands_match_p(), promote_decl_mode(), reg_num_sign_bit_copies_for_combine(), remove_eh_handler(), set_label_offsets(), simplify_relational_operation_1(), simplify_using_condition(), simplify_while_replacing(), and subst().
#define CONST0_RTX | ( | MODE | ) | (const_tiny_rtx[0][(int) (MODE)]) |
Returns a constant 0 rtx in mode MODE. Integer modes are treated the same as VOIDmode.
Referenced by clear_by_pieces(), count_type_elements(), do_compare_rtx_and_jump(), expand_doubleword_shift(), expand_vector_broadcast(), fold_rtx(), highest_pow2_factor_for_target(), simplify_byte_swapping_operation(), and split_iv().
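Because these table entries are shared rtxes, a pointer comparison suffices; a sketch, where mode is whatever mode the caller is working in:
    if (op == CONST0_RTX (mode))
      {
        /* OP is the zero of MODE, whether MODE is integer, floating
           point or vector.  */
      }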
#define const1_rtx (const_int_rtx[MAX_SAVED_CONST_INT+1]) |
#define CONST1_RTX | ( | MODE | ) | (const_tiny_rtx[1][(int) (MODE)]) |
Likewise, for the constants 1 and 2 and -1.
Referenced by split_iv().
#define const2_rtx (const_int_rtx[MAX_SAVED_CONST_INT+2]) |
#define CONST2_RTX | ( | MODE | ) | (const_tiny_rtx[2][(int) (MODE)]) |
#define CONST_DOUBLE_AS_FLOAT_P | ( | X | ) | (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) != VOIDmode) |
Predicate yielding true iff X is an rtx for a floating point constant.
Referenced by const_desc_rtx_eq(), decode_asm_operands(), process_alt_operands(), and simplify_relational_operation_1().
#define CONST_DOUBLE_AS_INT_P | ( | X | ) | (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) == VOIDmode) |
Predicate yielding true iff X is an rtx for a double-int.
Referenced by simplify_relational_operation_1().
#define CONST_DOUBLE_HIGH | ( | r | ) | XCMWINT (r, 1, CONST_DOUBLE, VOIDmode) |
Referenced by print_value(), and simplify_relational_operation_1().
#define CONST_DOUBLE_LOW | ( | r | ) | XCMWINT (r, 0, CONST_DOUBLE, VOIDmode) |
For a CONST_DOUBLE: for VOIDmode, there are two integers; CONST_DOUBLE_LOW is the low-order word and ..._HIGH the high-order. For a float, there is a REAL_VALUE_TYPE structure, and CONST_DOUBLE_REAL_VALUE(r) is a pointer to it.
Referenced by print_value(), and simplify_relational_operation_1().
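A sketch of the two cases, assuming x is already known to be a CONST_DOUBLE:
    if (CONST_DOUBLE_AS_INT_P (x))
      {
        /* Two-word integer: HI:LO holds the value.  */
        HOST_WIDE_INT lo = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT hi = CONST_DOUBLE_HIGH (x);
      }
    else if (CONST_DOUBLE_AS_FLOAT_P (x))
      {
        /* Floating point value: use the real.h interfaces on RV.  */
        const struct real_value *rv = CONST_DOUBLE_REAL_VALUE (x);
      }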
#define CONST_DOUBLE_P | ( | X | ) | (GET_CODE (X) == CONST_DOUBLE) |
Predicate yielding true iff X is an rtx for a double-int or floating point constant.
#define CONST_DOUBLE_REAL_VALUE | ( | r | ) | ((const struct real_value *) XCNMPRV (r, CONST_DOUBLE, VOIDmode)) |
Referenced by print_value().
#define CONST_FIXED_P | ( | X | ) | (GET_CODE (X) == CONST_FIXED) |
Predicate yielding nonzero iff X is an rtx for a constant fixed-point.
#define CONST_FIXED_VALUE | ( | r | ) | ((const struct fixed_value *) XCNMPFV (r, CONST_FIXED, VOIDmode)) |
Referenced by const_double_htab_eq(), and print_value().
#define CONST_FIXED_VALUE_HIGH | ( | r | ) | ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.high)) |
#define CONST_FIXED_VALUE_LOW | ( | r | ) | ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.low)) |
#define CONST_INT_P | ( | X | ) | (GET_CODE (X) == CONST_INT) |
Predicate yielding nonzero iff X is an rtx for a constant integer.
Referenced by compress_float_constant(), convert_memory_address_addr_space(), convert_modes(), end_ifcvt_sequence(), expand_mult(), find_call_stack_args(), find_shift_sequence(), find_single_use(), get_call_rtx_from(), make_extraction(), make_memloc(), move2add_use_add3_insn(), move_by_pieces_1(), offset_within_block_p(), operands_match_p(), output_asm_operand_names(), process_alt_operands(), promote_decl_mode(), register_operand(), remove_reg_equal_offset_note(), rtx_equal_for_memref_p(), set_label_offsets(), setup_elimination_map(), simplify_relational_operation_1(), simplify_set(), subst(), swap_commutative_operands_with_target(), and validate_simplify_insn().
#define CONST_SCALAR_INT_P | ( | X | ) | (CONST_INT_P (X) || CONST_DOUBLE_AS_INT_P (X)) |
Predicate yielding true iff X is an rtx for a integer const.
Referenced by process_alt_operands().
#define CONST_VECTOR_ELT | ( | RTX, N | ) | XCVECEXP (RTX, 0, N, CONST_VECTOR) |
For a CONST_VECTOR, return element #n.
#define CONST_VECTOR_NUNITS | ( | RTX | ) | XCVECLEN (RTX, 0, CONST_VECTOR) |
For a CONST_VECTOR, return the number of elements in a vector.
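A sketch of iterating over the elements of a CONST_VECTOR x:
    int i;
    for (i = 0; i < CONST_VECTOR_NUNITS (x); i++)
      {
        rtx elt = CONST_VECTOR_ELT (x, i);
        /* ELT is typically a CONST_INT, CONST_DOUBLE or CONST_FIXED.  */
      }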
#define CONSTANT_P | ( | X | ) | (GET_RTX_CLASS (GET_CODE (X)) == RTX_CONST_OBJ) |
1 if X is a constant value (an rtx in class RTX_CONST_OBJ), not necessarily an integer.
Referenced by adjust_for_new_dest(), canon_condition(), check_cond_move_block(), copy_rtx_if_shared_1(), do_output_reload(), emit_group_load_1(), emit_move_change_mode(), equiv_constant(), expand_debug_parm_decl(), find_comparison_args(), find_shift_sequence(), init_num_sign_bit_copies_in_rep(), no_conflict_move_test(), noce_emit_store_flag(), ok_for_base_p_nonstrict(), operands_match_p(), pmode_register_operand(), process_alt_operands(), register_operand(), reload_combine_closest_single_use(), rtx_equal_for_memref_p(), scratch_operand(), set_label_offsets(), set_usage_bits(), setup_elimination_map(), subst_reloads(), unchain_one_elt_loc_list(), validate_simplify_insn(), and vt_stack_adjustments().
#define CONSTANT_POOL_ADDRESS_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("CONSTANT_POOL_ADDRESS_P", (RTX), SYMBOL_REF)->unchanging) |
1 if RTX is a symbol_ref that addresses this function's rtl constants pool.
Referenced by default_binds_local_p(), get_integer_term(), and init_varasm_status().
#define constm1_rtx (const_int_rtx[MAX_SAVED_CONST_INT-1]) |
#define CONSTM1_RTX | ( | MODE | ) | (const_tiny_rtx[3][(int) (MODE)]) |
#define convert_memory_address | ( | to_mode, x | ) | convert_memory_address_addr_space ((to_mode), (x), ADDR_SPACE_GENERIC) |
Referenced by emit_jump(), expand_null_return_1(), and rtx_equal_for_memref_p().
#define COSTS_N_INSNS | ( | N | ) | ((N) * 4) |
Return the right cost to give to an operation to make the cost of the corresponding register-to-register instruction N times that of a fast register-to-register instruction.
Referenced by no_conflict_move_test().
#define CSELIB_VAL_PTR | ( | RTX | ) | X0CSELIB (RTX, 0) |
In a VALUE, the value cselib has assigned to RTX. This is a "struct cselib_val_struct", see cselib.h.
Referenced by add_mem_for_addr(), attrs_list_mpdv_union(), drop_overlapping_mem_locs(), refs_newer_value_cb(), and rtx_equal_for_memref_p().
#define DEBUG_EXPR_TREE_DECL | ( | RTX | ) | XCTREE (RTX, 0, DEBUG_EXPR) |
DEBUG_EXPR_DECL corresponding to a DEBUG_EXPR RTX.
Referenced by print_value().
#define DEBUG_IMPLICIT_PTR_DECL | ( | RTX | ) | XCTREE (RTX, 0, DEBUG_IMPLICIT_PTR) |
VAR_DECL/PARM_DECL DEBUG_IMPLICIT_PTR takes address of.
Referenced by rtx_equal_p_cb().
#define DEBUG_INSN_P | ( | X | ) | (GET_CODE (X) == DEBUG_INSN) |
Predicate yielding nonzero iff X is a debug note/insn.
Referenced by bb_note(), df_word_lr_simulate_uses(), gate_ud_dce(), get_last_insn_anywhere(), insert_var_expansion_initialization(), note_reg_elim_costly(), note_sets_clobbers(), reload_combine_purge_reg_uses_after_ruid(), replace_equiv_address_nv(), set_label_offsets(), set_used_flags(), subst_reloads(), and try_crossjump_to_edge().
#define DEBUG_PARAMETER_REF_DECL | ( | RTX | ) | XCTREE (RTX, 0, DEBUG_PARAMETER_REF) |
PARM_DECL DEBUG_PARAMETER_REF references.
Referenced by convert_descriptor_to_mode(), gen_formal_parameter_die(), and rtx_equal_p_cb().
#define DEF_RTL_EXPR | ( | ENUM, NAME, FORMAT, CLASS | ) | ENUM , |
#define ENTRY_VALUE_EXP | ( | RTX | ) | (RTL_CHECKC1 (RTX, 0, ENTRY_VALUE).rt_rtx) |
In an ENTRY_VALUE this is the DECL_INCOMING_RTL of the argument in question.
Referenced by rtx_equal_p_cb().
#define fatal_insn | ( | msgid, insn | ) | _fatal_insn (msgid, insn, __FILE__, __LINE__, __FUNCTION__) |
Referenced by rtl_verify_edges(), and rtl_verify_flow_info_1().
#define fatal_insn_not_found | ( | insn | ) | _fatal_insn_not_found (insn, __FILE__, __LINE__, __FUNCTION__) |
#define FIND_REG_INC_NOTE | ( | INSN, REG | ) | 0 |
Indicate whether the machine has any sort of auto increment addressing. If not, we can avoid checking for REG_INC notes. Define a macro to look for REG_INC notes, but save time on machines where they never exist.
#define FIRST_VIRTUAL_REGISTER (FIRST_PSEUDO_REGISTER) |
Virtual registers are used during RTL generation to refer to locations into the stack frame when the actual location isn't known until RTL generation is complete. The routine instantiate_virtual_regs replaces these with the proper value, which is normally {frame,arg,stack}_pointer_rtx plus a constant.
#define frame_pointer_rtx (global_rtl[GR_FRAME_POINTER]) |
#define GEN_INT | ( | N | ) | gen_rtx_CONST_INT (VOIDmode, (N)) |
Referenced by assemble_static_space(), count_type_elements(), create_fixed_operand(), decide_peel_simple(), dw2_asm_output_data_sleb128(), emit_block_move_libcall_fn(), emit_block_move_via_loop(), emit_cstore(), emit_move_change_mode(), expand_builtin_memset_args(), expand_mult_highpart_adjust(), expand_vector_broadcast(), extract_low_bits(), fixup_args_size_notes(), maybe_emit_sync_lock_test_and_set(), output_constant(), output_constructor_regular_field(), remove_eh_handler(), remove_unreachable_eh_regions_worker(), simplify_and_const_int_1(), swap_commutative_operands_with_target(), try_widen_shift_mode(), and undo_commit().
#define gen_lowpart rtl_hooks.gen_lowpart |
Keep this for the nonce.
Referenced by convert_modes(), expand_binop(), lookup_as_function(), make_compound_operation(), make_extraction(), simplify_set(), and undo_commit().
#define gen_rtx_ASM_INPUT | ( | MODE, ARG0 | ) | gen_rtx_fmt_si (ASM_INPUT, (MODE), (ARG0), 0) |
Include the RTL generation functions.
Referenced by get_reg_attrs().
#define gen_rtx_ASM_INPUT_loc | ( | MODE, ARG0, LOC | ) | gen_rtx_fmt_si (ASM_INPUT, (MODE), (ARG0), (LOC)) |
Referenced by n_occurrences().
#define gen_rtx_UNKNOWN_VAR_LOC | ( | ) | (gen_rtx_CLOBBER (VOIDmode, const0_rtx)) |
Expand to the RTL that denotes an unknown variable location in a DEBUG_INSN.
#define GET_CODE | ( | RTX | ) | ((enum rtx_code) (RTX)->code) |
Define macros to access the `code' field of the rtx.
Referenced by add_attr_value(), add_define_attr(), add_equal_note(), add_mem_for_addr(), add_mode_tests(), add_name_attribute(), addr_expr_of_non_mem_decl_p(), address_of_int_loc_descriptor(), address_operand(), adjust_mem_uses(), adjust_operands_numbers(), alter_reg(), apply_code_iterator(), asm_noperands(), assemble_name_raw(), assign_parm_setup_reg(), assign_stack_slot_num_and_sort_pseudos(), attr_alt_subset_of_compl_p(), attr_hash_add_string(), can_reload_into(), canon_condition(), canonicalize_change_group(), canonicalize_values_star(), change_cfi_row(), change_subst_attribute(), check_argument_store(), check_defs(), clobber_return_register(), collect_one_action_chain(), compress_float_constant(), compute_alternative_mask(), compute_const_anchors(), compute_local_properties(), cond_exec_find_if_block(), connect_traces(), contains_symbol_ref(), convert_memory_address_addr_space(), copy_rtx_if_shared_1(), count_alternatives(), count_reg_usage(), cse_prescan_path(), dead_debug_global_insert(), dead_or_set_regno_p(), decl_for_component_ref(), decls_for_scope(), decode_asm_operands(), decompose_register(), default_section_type_flags(), delete_slot_part(), df_bb_regno_first_def_find(), df_read_modify_subreg_p(), df_simulate_defs(), df_simulate_uses(), diagnostic_for_asm(), do_output_reload(), drop_overlapping_mem_locs(), dump_rtx_statistics(), dwarf2out_flush_queued_reg_saves(), dwarf2out_frame_debug_cfa_window_save(), emit_clobber(), emit_debug_insn(), emit_debug_insn_after_noloc(), emit_insn_at_entry(), emit_move_change_mode(), emit_move_insn(), emit_note_before(), emit_notes_for_differences_2(), emit_pattern_after(), emit_push_insn(), equiv_init_varies_p(), expand_copysign_bit(), extract_asm_operands(), find_call_stack_args(), find_comparison_args(), find_invariants_to_move(), find_loads(), find_reg_equal_equiv_note(), find_reg_note(), find_single_use(), for_each_rtx(), fprint_whex(), free_loop_data(), gcse_emit_move_after(), gen_attr(), gen_formal_parameter_die(), gen_insn(), gen_label_rtx(), gen_mnemonic_setattr(), gen_reg_rtx_offset(), gen_satfractuns_conv_libfunc(), get_attr_order(), get_biv_step_1(), get_call_rtx_from(), get_elimination(), get_final_hard_regno(), get_integer_term(), hash_scan_set(), init_num_sign_bit_copies_in_rep(), init_varasm_status(), initial_value_entry(), initialize_argument_information(), insert_insn_end_basic_block(), insert_var_expansion_initialization(), invert_exp_1(), iv_analysis_done(), kill_set_value(), lra_set_insn_deleted(), main(), make_extraction(), make_memloc(), mark_insn(), mark_pseudo_reg_dead(), maybe_memory_address_addr_space_p(), maybe_propagate_label_ref(), memory_operand(), merge_dir(), move2add_use_add3_insn(), multiple_sets(), new_decision(), noce_emit_store_flag(), note_outside_basic_block_p(), note_reg_elim_costly(), note_stores(), note_uses(), notice_source_line(), notice_stack_pointer_modification_1(), num_validated_changes(), offset_within_block_p(), ok_for_base_p_nonstrict(), one_code_hoisting_pass(), operands_match_p(), ordered_comparison_operator(), output_added_clobbers_hard_reg_p(), output_asm_insn(), output_get_insn_name(), preserve_value(), prev_nonnote_insn_bb(), previous_insn(), print_value(), process_alt_operands(), process_bb_node_lives(), push_insns(), record_component_aliases(), record_hard_reg_sets(), reg_class_from_constraints(), reg_overlap_mentioned_p(), reg_saved_in(), reload_as_needed(), reload_combine_closest_single_use(), remove_invalid_refs(), remove_note(), remove_reg_equal_offset_note(), 
remove_value_from_changed_variables(), replace_oldest_value_addr(), resolve_operand_name_1(), returnjump_p_1(), reverse_op(), rtl_verify_flow_info_1(), rtx_addr_varies_p(), rtx_equal_for_memref_p(), safe_insn_predicate(), save_call_clobbered_regs(), scompare_loc_descriptor(), scratch_operand(), set_dv_changed(), set_label_offsets(), set_nonzero_bits_and_sign_copies(), set_reg_attrs_from_value(), set_usage_bits(), setup_elimination_map(), setup_reg_equiv(), shallow_copy_rtvec(), shared_const_p(), simplejump_p(), simplify_byte_swapping_operation(), simplify_relational_operation_1(), simplify_replace_rtx(), simplify_set(), simplify_truncation(), single_set_2(), spill_hard_reg(), splay_tree_compare_strings(), store_killed_in_pat(), subst(), subst_reloads(), tablejump_p(), target_canonicalize_comparison(), try_back_substitute_reg(), unchain_one_elt_loc_list(), unroll_loop_stupid(), update_cfg_for_uncondjump(), uses_hard_regs_p(), valid_address_p(), validate_simplify_insn(), var_reg_decl_set(), var_reg_set(), var_regno_delete(), volatile_refs_p(), vt_canon_true_dep(), vt_stack_adjustments(), walk_attr_value(), write_const_num_delay_slots(), and write_header().
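The code returned by GET_CODE determines which accessors are valid on an rtx; a sketch of the usual dispatch, with x assumed to be the rtx under inspection:
    switch (GET_CODE (x))
      {
      case REG:
        /* REGNO (x) is the register number.  */
        break;
      case MEM:
        /* XEXP (x, 0) is the address expression.  */
        break;
      case PLUS:
        /* XEXP (x, 0) and XEXP (x, 1) are the two operands.  */
        break;
      default:
        break;
      }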
#define GET_MODE | ( | RTX | ) | ((enum machine_mode) (RTX)->mode) |
Referenced by add_stores(), addr_expr_of_non_mem_decl_p_1(), address_of_int_loc_descriptor(), address_operand(), adjust_offset_for_component_ref(), anti_adjust_stack_and_probe(), asm_noperands(), asm_operand_ok(), assemble_name_raw(), assign_mem_slot(), assign_parm_setup_reg(), assign_temp(), build_def_use(), build_libfunc_function(), calculate_bb_reg_pressure(), can_compare_and_swap_p(), can_reload_into(), canonicalize_values_star(), combine_set_extension(), compress_float_constant(), compute_const_anchors(), cond_exec_find_if_block(), const_double_htab_eq(), convert_memory_address_addr_space(), copy_rtx_if_shared_1(), count_reg_usage(), count_type_elements(), cselib_invalidate_mem(), cselib_invalidate_regno(), cselib_reg_set_mode(), dataflow_set_destroy(), dead_debug_insert_temp(), decl_for_component_ref(), decode_asm_operands(), decompose_register(), delete_caller_save_insns(), distribute_and_simplify_rtx(), do_output_reload(), dump_case_nodes(), dump_insn_info(), dv_changed_p(), dwarf2out_frame_debug_adjust_cfa(), emit_cmp_and_jump_insn_1(), emit_cstore(), emit_group_load_1(), entry_register(), expand_builtin_memset_args(), expand_debug_parm_decl(), expand_mem_signal_fence(), expand_mult(), expand_naked_return(), expand_value_return(), extract_asm_operands(), extract_split_bit_field(), find_comparison_args(), find_reloads_toplev(), find_shift_sequence(), find_single_use(), fixed_base_plus_p(), gen_formal_parameter_die(), gen_group_rtx(), gen_highpart_mode(), gen_insn(), gen_lowpart_common(), gen_lowpart_if_possible(), get_biv_step_1(), get_inner_reference(), get_ivts_expr(), cselib_hasher::hash(), have_sub2_insn(), inherit_piecemeal_p(), init_reg_last(), insert_move_for_subreg(), insert_restore(), insert_save(), invariant_for_use(), invert_exp_1(), invert_jump_1(), kill_set_value(), mark_nonreg_stores_2(), mark_pseudo_regno_subword_dead(), match_reload(), mathfn_built_in_1(), may_trap_p(), maybe_memory_address_addr_space_p(), merge_overlapping_regs(), move2add_use_add3_insn(), move2add_valid_value_p(), move_block_to_reg(), no_conflict_move_test(), noce_emit_store_flag(), noce_try_addcc(), noce_try_cmove_arith(), nonimmediate_operand(), num_changes_pending(), operands_match_p(), print_value(), process_alt_operands(), push_insns(), push_secondary_reload(), record_component_aliases(), record_value_for_reg(), redirect_jump(), reg_class_from_constraints(), reg_loc_descriptor(), register_operand(), reload_as_needed(), reload_combine_closest_single_use(), reload_combine_note_store(), reload_combine_recognize_pattern(), remove_child_with_prev(), replace_reg_with_saved_mem(), reset_opr_set_tables(), resolve_reg_notes(), resolve_subreg_use(), reverse_op(), rtx_equal_for_memref_p(), scompare_loc_descriptor(), scratch_operand(), set_decl_origin_self(), set_dv_changed(), set_label_offsets(), set_mem_attributes(), set_of_1(), set_reg_attrs_from_value(), set_storage_via_setmem(), setup_elimination_map(), setup_incoming_promotions(), setup_reg_equiv(), shift_optab_p(), simplify_relational_operation_1(), simplify_replace_rtx(), simplify_set(), simplify_while_replacing(), sjlj_assign_call_site_values(), spill_hard_reg(), split_iv(), stabilize_va_list_loc(), store_bit_field(), store_killed_before(), subst_pattern_match(), subst_reloads(), unroll_loop_stupid(), update_auto_inc_notes(), uses_hard_regs_p(), val_bind(), validate_simplify_insn(), variable_part_different_p(), volatile_refs_p(), and vt_stack_adjustments().
#define GET_NOTE_INSN_NAME | ( | NOTE_CODE | ) | (note_insn_name[(NOTE_CODE)]) |
#define GET_NUM_ELEM | ( | RTVEC | ) | ((RTVEC)->num_elem) |
Referenced by change_subst_attribute(), and find_int().
#define GET_REG_NOTE_NAME | ( | MODE | ) | (reg_note_name[(int) (MODE)]) |
#define GET_RTX_CLASS | ( | CODE | ) | (rtx_class[(int) (CODE)]) |
#define GET_RTX_FORMAT | ( | CODE | ) | (rtx_format[(int) (CODE)]) |
#define GET_RTX_LENGTH | ( | CODE | ) | (rtx_length[(int) (CODE)]) |
#define GET_RTX_NAME | ( | CODE | ) | (rtx_name[(int) (CODE)]) |
Referenced by add_map_value(), dump_rtx_statistics(), gen_satfractuns_conv_libfunc(), and print_value().
#define global_rtl (this_target_rtl->x_global_rtl) |
#define HARD_FRAME_POINTER_IS_ARG_POINTER (HARD_FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM) |
#define HARD_FRAME_POINTER_IS_FRAME_POINTER (HARD_FRAME_POINTER_REGNUM == FRAME_POINTER_REGNUM) |
Referenced by make_memloc().
#define HARD_FRAME_POINTER_REGNUM FRAME_POINTER_REGNUM |
If HARD_FRAME_POINTER_REGNUM is defined, then a special dummy reg is used to represent the frame pointer. This is because the hard frame pointer and the automatic variables are separated by an amount that cannot be determined until after register allocation. We can assume that in this case ELIMINABLE_REGS will be defined, one action of which will be to eliminate FRAME_POINTER_REGNUM into HARD_FRAME_POINTER_REGNUM.
Referenced by add_mem_for_addr(), dwarf2out_frame_debug_cfa_window_save(), insn_contains_asm_1(), and remove_reg_equal_offset_note().
#define hard_frame_pointer_rtx (global_rtl[GR_HARD_FRAME_POINTER]) |
Referenced by cselib_record_sets(), rtx_unstable_p(), rtx_varies_p(), set_usage_bits(), and target_char_cast().
#define HARD_REGISTER_NUM_P | ( | REG_NO | ) | ((REG_NO) < FIRST_PSEUDO_REGISTER) |
1 if the given register number REG_NO corresponds to a hard register.
Referenced by df_chain_finalize(), insert_save(), resolve_reg_notes(), and variable_part_different_p().
#define HARD_REGISTER_P | ( | REG | ) | (HARD_REGISTER_NUM_P (REGNO (REG))) |
1 if the given register REG corresponds to a hard register.
Referenced by bb_has_abnormal_call_pred(), dump_insn_info(), emit_move_change_mode(), iv_analysis_done(), record_jump_cond_subreg(), record_last_set_info(), save_call_clobbered_regs(), and split_double().
#define HAVE_POST_DECREMENT 0 |
#define HAVE_POST_INCREMENT 0 |
Referenced by get_def_for_expr().
#define HAVE_POST_MODIFY_DISP 0 |
#define HAVE_POST_MODIFY_REG 0 |
#define HAVE_PRE_DECREMENT 0 |
#define HAVE_PRE_INCREMENT 0 |
#define HAVE_PRE_MODIFY_DISP 0 |
#define HAVE_PRE_MODIFY_REG 0 |
#define IGNORED_DWARF_REGNUM (INVALID_REGNUM - 1) |
REGNUM for which no debug information can be generated.
#define INSN_ANNULLED_BRANCH_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN)->unchanging) |
1 if RTX is a jump_insn, call_insn, or insn that is an annulling branch.
#define INSN_CODE | ( | INSN | ) | XINT (INSN, 6) |
Code number of instruction, from when it was recognized. -1 means this instruction has not been recognized yet.
Referenced by init_elim_table(), mark_label_nuses(), maybe_fix_stack_asms(), multiple_sets(), and process_alt_operands().
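A sketch of the usual access pattern: recog_memoized (from recog.h) recognizes the insn on demand, caches the result in INSN_CODE and returns it, with -1 meaning unrecognizable:
    if (recog_memoized (insn) >= 0)
      {
        /* INSN_CODE (insn) now indexes the target's insn table.  */
      }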
#define INSN_DELETED_P | ( | RTX | ) |
1 if RTX is an insn that has been deleted.
Referenced by pre_edge_insert(), premark_types_used_by_global_vars_helper(), and return_insn_p().
#define INSN_FROM_TARGET_P | ( | RTX | ) |
1 if RTX is an insn in a delay slot and is from the target of the branch. If the branch insn has INSN_ANNULLED_BRANCH_P set, this insn should only be executed if the branch is taken. For annulled branches with this bit clear, the insn should be executed only if the branch is not taken.
#define INSN_HAS_LOCATION | ( | INSN | ) |
Referenced by loop_latch_edge(), and rtl_split_block().
#define INSN_LOCATION | ( | INSN | ) | XUINT (INSN, 5) |
#define INSN_P | ( | X | ) | (NONJUMP_INSN_P (X) || DEBUG_INSN_P (X) || JUMP_P (X) || CALL_P (X)) |
Predicate yielding nonzero iff X is a real insn.
Referenced by check_for_label_ref(), collect_one_action_chain(), compute_out(), count_reg_usage(), covers_regno_no_parallel_p(), covers_regno_p(), cse_prescan_path(), dead_or_set_regno_p(), df_chain_remove_problem(), df_get_call_refs(), df_get_exit_block_use_set(), df_live_free_bb_info(), df_set_regs_ever_live(), do_clobber_return_reg(), dse_step3(), eh_returnjump_p_1(), find_dead_or_set_registers(), for_each_eh_label(), get_first_nonnote_insn(), hash_scan_set(), in_list_p(), inner_loop_header_p(), insert_var_expansion_initialization(), loop_latch_edge(), make_reg_eh_region_note(), maybe_fix_stack_asms(), memref_referenced_p(), notice_stack_pointer_modification(), prev_nonnote_nondebug_insn(), record_hard_reg_uses(), reload_cse_regs_1(), remove_unreachable_eh_regions(), reorder_basic_blocks(), rtx_addr_varies_p(), scan_stores_spill(), sjlj_assign_call_site_values(), sprint_ul(), too_high_register_pressure_p(), update_alignments(), and vt_get_decl_and_offset().
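A sketch of the canonical walk over the current function's insn stream, skipping notes, barriers, labels and jump table data:
    rtx insn;
    for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
      if (INSN_P (insn))
        {
          rtx pat = PATTERN (insn);
          /* ... examine PAT ...  */
        }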
#define INSN_UID | ( | INSN | ) | XINT (INSN, 0) |
ACCESS MACROS for particular fields of insns. Holds a unique number for each insn. These are not necessarily sequentially increasing.
Referenced by add_to_inherit(), btr_def_live_range(), change_cfi_row(), compute_out(), connect_traces(), cselib_process_insn(), df_bb_regno_first_def_find(), df_chain_remove_problem(), df_dump_insn_bottom(), df_dump_insn_top(), df_hard_reg_used_p(), df_live_free_bb_info(), df_whole_mw_reg_dead_p(), df_word_lr_transfer_function(), dse_step3(), dse_transfer_function(), dump_prediction(), emit_call_insn_after_setloc(), emit_jump_insn_after_setloc(), find_if_case_2(), find_removable_extensions(), insert_insn_end_basic_block(), insert_store(), insn_addresses_new(), invalidate_insn_data_regno_info(), lra_inheritance(), lra_push_insn_by_uid(), lra_set_regno_unique_value(), mark_label_nuses(), new_btr_user(), note_sets_clobbers(), print_value(), process_bb_node_lives(), profile_function(), regrename_chain_from_id(), remove_pseudos(), replace_equiv_address_nv(), return_insn_p(), rtl_verify_edges(), swap_operands(), unsuitable_loc(), and web_main().
#define INSN_VAR_LOCATION | ( | INSN | ) | PATTERN (INSN) |
The VAR_LOCATION rtx in a DEBUG_INSN.
#define INSN_VAR_LOCATION_DECL | ( | INSN | ) | PAT_VAR_LOCATION_DECL (INSN_VAR_LOCATION (INSN)) |
Accessors for a tree-expanded var location debug insn.
Referenced by insert_var_expansion_initialization(), and print_insn().
#define INSN_VAR_LOCATION_LOC | ( | INSN | ) | PAT_VAR_LOCATION_LOC (INSN_VAR_LOCATION (INSN)) |
Referenced by print_insn(), and reload_combine_purge_reg_uses_after_ruid().
#define INSN_VAR_LOCATION_STATUS | ( | INSN | ) | PAT_VAR_LOCATION_STATUS (INSN_VAR_LOCATION (INSN)) |
#define INTVAL | ( | RTX | ) | XCWINT (RTX, 0, CONST_INT) |
For a CONST_INT rtx, INTVAL extracts the integer.
Referenced by builtin_memset_gen_str(), check_defs(), combine_set_extension(), compress_float_constant(), compute_const_anchors(), convert_modes(), dump_prediction(), dwarf2out_flush_queued_reg_saves(), emit_notes_for_differences_2(), end_ifcvt_sequence(), expand_builtin_update_setjmp_buf(), expand_mult(), expand_widening_mult(), extract_low_bits(), find_call_stack_args(), find_shift_sequence(), find_single_use(), fixup_args_size_notes(), force_reg(), free_csa_reflist(), get_eh_region_from_rtx(), get_pos_from_mask(), insert_regs(), insert_temp_slot_address(), make_extraction(), maybe_memory_address_addr_space_p(), move2add_use_add3_insn(), move2add_valid_value_p(), move_by_pieces_1(), noce_emit_store_flag(), note_reg_elim_costly(), operands_match_p(), output_asm_operand_names(), print_value(), process_alt_operands(), promote_decl_mode(), reg_saved_in(), register_operand(), rtx_equal_for_memref_p(), rtx_for_function_call(), set_label_offsets(), set_reg_attrs_from_value(), setup_elimination_map(), simplify_comparison(), simplify_plus_minus_op_data_cmp(), simplify_relational_operation_1(), simplify_set(), subst(), swap_commutative_operands_with_target(), try_widen_shift_mode(), validate_simplify_insn(), validize_mem(), var_reg_decl_set(), var_regno_delete(), and vt_get_canonicalize_base().
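A small illustrative check, assuming only the macros documented here: INTVAL may be applied only after the code has been verified with CONST_INT_P. (For the shared constants one can also compare pointers against const0_rtx and friends.)

    /* Sketch: true if X is the integer constant zero.  */
    static bool
    rtx_is_const_zero_p (rtx x)
    {
      return CONST_INT_P (x) && INTVAL (x) == 0;
    }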
#define INVALID_REGNUM (~(unsigned int) 0) |
REGNUM never really appearing in the INSN stream.
Referenced by debug_value_data(), df_get_regular_block_artificial_uses(), dwarf2out_frame_debug_adjust_cfa(), fp_setter_insn(), and set_value_regno().
#define JUMP_LABEL | ( | INSN | ) | XCEXP (INSN, 8, JUMP_INSN) |
In jump.c, each JUMP_INSN can point to a label that it can jump to, so that if the JUMP_INSN is deleted, the label's LABEL_NUSES can be decremented and possibly the label can be deleted.
Referenced by cond_exec_find_if_block(), find_cond_trap(), maybe_propagate_label_ref(), outof_cfg_layout_mode(), profile_function(), prologue_epilogue_contains(), record_truncated_values(), redirect_exp_1(), replace_rtx(), and sprint_ul_rev().
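A sketch of the bookkeeping described above, using only macros from this header (the helper is hypothetical): when a jump is about to be removed, the target label's LABEL_NUSES is decremented so that unused labels can later be deleted.

    /* Sketch: drop the reference a jump insn holds on its target label.  */
    static void
    forget_jump_target (rtx jump)
    {
      /* JUMP_LABEL may also be a RETURN/SIMPLE_RETURN rtx, hence LABEL_P.  */
      if (JUMP_P (jump) && JUMP_LABEL (jump) != NULL_RTX
          && LABEL_P (JUMP_LABEL (jump)))
        --LABEL_NUSES (JUMP_LABEL (jump));
    }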
#define JUMP_P | ( | X | ) | (GET_CODE (X) == JUMP_INSN) |
Predicate yielding nonzero iff X is a jump insn.
Referenced by add_labels_and_missing_jumps(), btr_def_live_range(), expand_builtin_longjmp(), find_cond_trap(), find_partition_fixes(), gcse_emit_move_after(), get_last_bb_insn(), get_last_insertion_point(), maybe_fix_stack_asms(), next_real_insn(), profile_function(), prologue_epilogue_contains(), remove_predictions_associated_with_edge(), replace_rtx(), and returnjump_p_1().
#define JUMP_TABLE_DATA_P | ( | INSN | ) | (GET_CODE (INSN) == JUMP_TABLE_DATA) |
Predicate yielding nonzero iff X is the data for a jump table.
Referenced by inside_basic_block_p(), make_pass_compute_alignments(), maybe_fix_stack_asms(), replace_rtx(), and rtl_verify_flow_info_1().
#define LABEL_ALT_ENTRY_P | ( | LABEL | ) | (LABEL_KIND (LABEL) != LABEL_NORMAL) |
#define LABEL_KIND | ( | LABEL | ) | ((enum label_kind) (((LABEL)->jump << 1) | (LABEL)->call)) |
Retrieve the kind of LABEL.
#define LABEL_NAME | ( | RTX | ) | XCSTR (RTX, 7, CODE_LABEL) |
The name of a label, in case it corresponds to an explicit label in the input source code.
Referenced by delete_insn(), and expand_computed_goto().
#define LABEL_NUSES | ( | RTX | ) | XCINT (RTX, 5, CODE_LABEL) |
In jump.c, each label contains a count of the number of LABEL_REFs that point at it, so unused labels can be deleted.
Referenced by cond_exec_find_if_block(), emit_barrier_after_bb(), gcse_emit_move_after(), make_pass_cleanup_barriers(), prev_nonnote_insn_bb(), profile_function(), and record_truncated_values().
#define LABEL_P | ( | X | ) | (GET_CODE (X) == CODE_LABEL) |
Predicate yielding nonzero iff X is a label insn.
Referenced by block_label(), can_fallthru(), create_cfi_notes(), gcse_emit_move_after(), init_resource_info(), make_pass_cleanup_barriers(), maybe_fix_stack_asms(), prev_nonnote_insn_bb(), profile_function(), record_truncated_values(), rtl_split_edge(), rtl_verify_edges(), sjlj_assign_call_site_values(), subst_reloads(), thread_prologue_and_epilogue_insns(), try_crossjump_to_edge(), and update_alignments().
#define LABEL_PRESERVE_P | ( | RTX | ) | (RTL_FLAG_CHECK2 ("LABEL_PRESERVE_P", (RTX), CODE_LABEL, NOTE)->in_struct) |
1 if RTX is a code_label that should always be considered to be needed.
Referenced by make_pass_cleanup_barriers().
#define LABEL_REF_NONLOCAL_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("LABEL_REF_NONLOCAL_P", (RTX), LABEL_REF)->volatil) |
1 if RTX is a label_ref for a nonlocal label. Likewise in an expr_list for a REG_LABEL_OPERAND or REG_LABEL_TARGET note.
Referenced by alter_reg(), convert_memory_address_addr_space(), gcse_emit_move_after(), and invert_jump().
Once basic blocks are found, each CODE_LABEL starts a chain that goes through all the LABEL_REFs that jump to that label. The chain eventually winds up at the CODE_LABEL: it is circular.
Referenced by profile_function().
#define LAST_VIRTUAL_POINTER_REGISTER ((FIRST_VIRTUAL_REGISTER) + 4) |
#define LAST_VIRTUAL_REGISTER ((FIRST_VIRTUAL_REGISTER) + 5) |
Referenced by determine_common_wider_type(), and gmalloc().
#define LRA_SUBREG_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("LRA_SUBREG_P", (RTX), SUBREG)->jump) |
True if the subreg was generated by LRA for reload insns. Such subregs are valid only during LRA.
Referenced by reg_class_from_constraints().
#define MAX_COST INT_MAX |
Maximum cost of an rtl expression. This value has the special meaning that an rtx with this cost must not be used under any circumstances.
Referenced by try_back_substitute_reg().
#define MAX_SAVED_CONST_INT 64 |
#define MAY_HAVE_DEBUG_INSNS (flag_var_tracking_assignments) |
Nonzero if DEBUG_INSN_P may possibly hold.
Referenced by find_src_set_src(), init_dce(), loc_exp_insert_dep(), and vt_get_decl_and_offset().
#define MEM_ADDR_SPACE | ( | RTX | ) | (get_mem_attrs (RTX)->addrspace) |
For a MEM rtx, the address space.
Referenced by distribute_and_simplify_rtx(), gen_lowpart_if_possible(), move_by_pieces_1(), num_changes_pending(), reload_combine_closest_single_use(), and subreg_lowpart_offset().
#define MEM_ALIAS_SET | ( | RTX | ) | (get_mem_attrs (RTX)->alias) |
For a MEM rtx, the alias set. If 0, this MEM is not in any alias set, and may alias anything. Otherwise, the MEM can only alias MEMs in a conflicting alias set. This value is set in a language-dependent manner in the front-end, and should not be altered in the back-end. These set numbers are tested with alias_sets_conflict_p.
Referenced by array_ref_element_size(), decl_for_component_ref(), and rtx_refs_may_alias_p().
#define MEM_ALIGN | ( | RTX | ) | (get_mem_attrs (RTX)->align) |
For a MEM rtx, the alignment in bits. We can use the alignment of the mode as a default when STRICT_ALIGNMENT, but not otherwise.
Referenced by alignment_for_piecewise_move(), delete_caller_save_insns(), insert_restore(), lowpart_bit_field_p(), move_by_pieces_1(), replace_reg_with_saved_mem(), and store_bit_field().
#define MEM_ATTRS | ( | RTX | ) | X0MEMATTR (RTX, 1) |
The memory attribute block. We provide access macros for each value in the block and provide defaults if none specified.
#define MEM_COPY_ATTRIBUTES | ( | LHS, | |
RHS | |||
) |
Copy the attributes that apply to memory locations from RHS to LHS.
Referenced by set_mem_alias_set(), and set_storage_via_setmem().
#define MEM_EXPR | ( | RTX | ) | (get_mem_attrs (RTX)->expr) |
For a MEM rtx, the decl it is known to refer to, if it is known to refer to part of a DECL. It may also be a COMPONENT_REF.
Referenced by assign_parms_setup_varargs(), move_by_pieces_1(), and reverse_op().
#define MEM_KEEP_ALIAS_SET_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("MEM_KEEP_ALIAS_SET_P", (RTX), MEM)->jump) |
1 if RTX is a mem and we should keep the alias set for this mem unchanged when we access a component. Set to 1, for example, when we are already in a non-addressable component of an aggregate.
Referenced by all_zeros_p().
#define MEM_NOTRAP_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("MEM_NOTRAP_P", (RTX), MEM)->call) |
1 if RTX is a mem that cannot trap.
Referenced by volatile_refs_p().
#define MEM_OFFSET | ( | RTX | ) | (get_mem_attrs (RTX)->offset) |
For a MEM rtx, the offset from the start of MEM_EXPR.
Referenced by assign_parms_setup_varargs().
#define MEM_OFFSET_KNOWN_P | ( | RTX | ) | (get_mem_attrs (RTX)->offset_known_p) |
For a MEM rtx, true if its MEM_OFFSET is known.
Referenced by assign_parms_setup_varargs().
#define MEM_P | ( | X | ) | (GET_CODE (X) == MEM) |
Predicate yielding nonzero iff X is an rtx for a memory location.
Referenced by add_equal_note(), add_name_attribute(), all_zeros_p(), allocate_struct_function(), array_ref_element_size(), can_compare_and_swap_p(), can_reload_into(), ceiling(), check_argument_store(), clear_storage_libcall_fn(), combine_stack_adjustments(), copy_rtx_if_shared_1(), count_type_elements(), cselib_invalidate_regno(), cselib_reg_set_mode(), decode_asm_operands(), df_simulate_defs(), df_simulate_uses(), discover_nonconstant_array_refs_r(), do_output_reload(), dv_changed_p(), dwarf2out_flush_queued_reg_saves(), dwarf2out_frame_debug_cfa_window_save(), emit_move_change_mode(), equiv_init_varies_p(), expand_reg_info(), find_loads(), gen_formal_parameter_die(), have_global_bss_p(), init_num_sign_bit_copies_in_rep(), initialize_argument_information(), lowpart_bit_field_p(), make_decl_rtl_for_debug(), move_by_pieces_1(), noce_emit_store_flag(), nonimmediate_operand(), note_stores(), notice_stack_pointer_modification_1(), one_code_hoisting_pass(), process_alt_operands(), reg_class_from_constraints(), reg_saved_in(), reg_truncated_to_mode_general(), reverse_op(), rtx_addr_varies_p(), set_dv_changed(), set_storage_via_libcall(), set_storage_via_setmem(), spill_hard_reg(), split_double(), store_killed_before(), subst_reloads(), valid_address_p(), validate_simplify_insn(), and write_dependence_p().
#define MEM_POINTER | ( | RTX | ) | (RTL_FLAG_CHECK1 ("MEM_POINTER", (RTX), MEM)->frame_related) |
1 if RTX is a mem that holds a pointer value.
Referenced by split_double().
#define MEM_READONLY_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("MEM_READONLY_P", (RTX), MEM)->unchanging) |
1 if RTX is a mem that is statically allocated in read-only memory.
Referenced by df_simulate_defs(), rtx_unstable_p(), and rtx_varies_p().
#define MEM_SIZE | ( | RTX | ) | (get_mem_attrs (RTX)->size) |
For a MEM rtx, the size in bytes of the MEM.
Referenced by find_call_stack_args(), and volatile_refs_p().
#define MEM_SIZE_KNOWN_P | ( | RTX | ) | (get_mem_attrs (RTX)->size_known_p) |
For a MEM rtx, true if its MEM_SIZE is known.
Referenced by find_call_stack_args(), and volatile_refs_p().
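A sketch tying the MEM attribute accessors together (the dump helper is hypothetical); the *_KNOWN_P predicates guard the corresponding values.

    /* Sketch: describe a MEM's attribute block in a dump file.  */
    static void
    dump_mem_attrs (FILE *file, rtx mem)
    {
      gcc_assert (MEM_P (mem));
      fprintf (file, "align=%u bits", MEM_ALIGN (mem));
      if (MEM_SIZE_KNOWN_P (mem))
        fprintf (file, " size=" HOST_WIDE_INT_PRINT_DEC, MEM_SIZE (mem));
      if (MEM_OFFSET_KNOWN_P (mem))
        fprintf (file, " offset=" HOST_WIDE_INT_PRINT_DEC, MEM_OFFSET (mem));
    }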
#define MEM_VOLATILE_P | ( | RTX | ) |
1 if RTX is a mem or asm_operand for a volatile reference.
Referenced by calculate_bb_reg_pressure(), decl_for_component_ref(), df_simulate_defs(), df_simulate_uses(), distribute_and_simplify_rtx(), get_reg_attrs(), maybe_emit_atomic_exchange(), n_occurrences(), remove_note(), rtx_unstable_p(), rtx_varies_p(), store_bit_field(), store_killed_before(), subst_reloads(), and volatile_refs_p().
#define mode_mem_attrs (this_target_rtl->x_mode_mem_attrs) |
Referenced by set_mem_attributes().
#define NEXT_INSN | ( | INSN | ) | XEXP (INSN, 2) |
Referenced by alloc_use_cost_map(), bb_note(), block_label(), can_fallthru(), cfg_layout_split_block(), cheap_bb_rtx_cost_p(), check_for_label_ref(), collect_one_action_chain(), compute_out(), count_reg_usage(), debug_rtx_list(), delete_insn(), delete_trivially_dead_insns(), df_simulate_initialize_backwards(), do_clobber_return_reg(), emit_barrier_after(), emit_pattern_after_setloc(), expand_copysign(), expand_dummy_function_end(), for_each_eh_label(), fprint_whex(), get_first_nonnote_insn(), get_stored_val(), insert_store(), insert_var_expansion_initialization(), inside_basic_block_p(), insn_live_p(), label_for_bb(), link_insn_into_chain(), make_pass_cleanup_barriers(), make_pass_free_cfg(), memref_referenced_p(), merge_blocks_move_predecessor_nojumps(), note_outside_basic_block_p(), prev_nondebug_insn(), prev_nonnote_nondebug_insn(), profile_function(), rebuild_jump_labels_chain(), record_insns(), reemit_insn_block_notes(), reload_combine_purge_reg_uses_after_ruid(), replace_rtx(), restore_operands(), rtl_delete_block(), rtl_force_nonfallthru(), rtl_split_block(), rtl_split_edge(), rtl_verify_edges(), rtl_verify_flow_info_1(), set_first_insn(), set_used_flags(), sprint_ul(), too_high_register_pressure_p(), try_crossjump_to_edge(), update_alignments(), and vt_get_decl_and_offset().
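A minimal walk over the insn chain using this link field (hypothetical helper); NONDEBUG_INSN_P filters out notes, barriers, labels and debug insns.

    /* Sketch: count the real insns between FROM and TO, inclusive.  */
    static int
    count_real_insns (rtx from, rtx to)
    {
      rtx insn;
      int n = 0;
      for (insn = from; insn != NULL_RTX; insn = NEXT_INSN (insn))
        {
          if (NONDEBUG_INSN_P (insn))
            n++;
          if (insn == to)
            break;
        }
      return n;
    }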
#define NON_COMMUTATIVE_P | ( | X | ) |
1 if X is a non-commutative operator.
#define NONDEBUG_INSN_P | ( | X | ) | (INSN_P (X) && !DEBUG_INSN_P (X)) |
Predicate yielding nonzero iff X is an insn that is not a debug insn.
Referenced by add_to_inherit(), cfg_layout_redirect_edge_and_branch_force(), decls_for_scope(), df_simulate_initialize_backwards(), get_last_insertion_point(), insert_store(), lra_update_dups(), memref_referenced_p(), num_loop_insns(), one_code_hoisting_pass(), one_pre_gcse_pass(), process_bb_node_lives(), restore_operands(), rtl_split_block(), should_hoist_expr_to_dom(), store_killed_in_pat(), use_crosses_set_p(), and web_main().
#define NONJUMP_INSN_P | ( | X | ) | (GET_CODE (X) == INSN) |
Predicate yielding nonzero iff X is an insn that cannot jump.
Referenced by cheap_bb_rtx_cost_p(), clobber_return_register(), collect_one_action_chain(), fprint_whex(), and previous_insn().
#define NOOP_MOVE_INSN_CODE INT_MAX |
Register Transfer Language (RTL) definitions for GCC Copyright (C) 1987-2013 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see http://www.gnu.org/licenses/.
Value used by some passes to "recognize" noop moves as valid instructions.
Referenced by multiple_sets().
#define NOTE_BASIC_BLOCK | ( | INSN | ) | XCBBDEF (INSN, 4, NOTE) |
Referenced by rtl_verify_edges().
#define NOTE_BLOCK | ( | INSN | ) | XCTREE (INSN, 4, NOTE) |
Referenced by add_debug_prefix_map().
#define NOTE_CFI | ( | INSN | ) | XCCFI (INSN, 4, NOTE) |
Referenced by new_cfi_row().
#define NOTE_DATA | ( | INSN | ) | RTL_CHECKC1 (INSN, 4, NOTE) |
In a NOTE that is a line number, this is a string for the file name that the line is in. We use the same field to record block numbers temporarily in NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes. (We avoid lots of casts between ints and pointers if we use a different macro for the block number.) Opaque data.
Referenced by emit_debug_insn_after().
#define NOTE_DELETED_LABEL_NAME | ( | INSN | ) | XCSTR (INSN, 4, NOTE) |
Referenced by delete_insn().
#define NOTE_DURING_CALL_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("NOTE_VAR_LOCATION_DURING_CALL_P", (RTX), NOTE)->call) |
1 if RTX is emitted after a call, but it should take effect before the call returns.
#define NOTE_EH_HANDLER | ( | INSN | ) | XCINT (INSN, 4, NOTE) |
Referenced by collect_one_action_chain().
#define NOTE_INSN_BASIC_BLOCK_P | ( | INSN | ) | (NOTE_P (INSN) && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK) |
Nonzero if INSN is a note marking the beginning of a basic block.
Referenced by can_fallthru(), delete_insn(), inner_loop_header_p(), rtl_split_edge(), rtl_verify_bb_pointers(), rtl_verify_edges(), and sjlj_emit_function_enter().
#define NOTE_KIND | ( | INSN | ) | XCINT (INSN, 5, NOTE) |
In a NOTE that is a line number, this is the line number. Other kinds of NOTEs are identified by negative numbers here.
Referenced by block_fallthru(), create_cfi_notes(), delete_insn(), emit_debug_insn_after(), reemit_insn_block_notes(), and sjlj_emit_function_enter().
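A small sketch of the usual NOTE_P / NOTE_KIND pairing (equivalent to the NOTE_INSN_BASIC_BLOCK_P macro above); NOTE_KIND must only be applied to insns already known to be notes.

    /* Sketch: true if INSN is the note that opens a basic block.  */
    static bool
    starts_bb_p (rtx insn)
    {
      return NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK;
    }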
#define NOTE_LABEL_NUMBER | ( | INSN | ) | XCINT (INSN, 4, NOTE) |
#define NOTE_P | ( | X | ) | (GET_CODE (X) == NOTE) |
Predicate yielding nonzero iff X is a note insn.
Referenced by block_fallthru(), create_cfi_notes(), delete_insn_and_edges(), get_last_insn_anywhere(), reemit_insn_block_notes(), spill_hard_reg(), and try_crossjump_to_edge().
#define NOTE_VAR_LOCATION | ( | INSN | ) | XCEXP (INSN, 4, NOTE) |
Referenced by gen_formal_parameter_die().
#define NOTE_VAR_LOCATION_DECL | ( | NOTE | ) | PAT_VAR_LOCATION_DECL (NOTE_VAR_LOCATION (NOTE)) |
Accessors for a NOTE_INSN_VAR_LOCATION.
#define NOTE_VAR_LOCATION_LOC | ( | NOTE | ) | PAT_VAR_LOCATION_LOC (NOTE_VAR_LOCATION (NOTE)) |
#define NOTE_VAR_LOCATION_STATUS | ( | NOTE | ) | PAT_VAR_LOCATION_STATUS (NOTE_VAR_LOCATION (NOTE)) |
#define NULL_RTVEC (rtvec) 0 |
#define NULL_RTX (rtx) 0 |
Referenced by add_costs(), add_int_reg_note(), add_pseudo_to_slot(), add_stores(), add_to_inherit(), analyze_insn_to_expand_var(), assign_stack_slot_num_and_sort_pseudos(), attr_hash_add_rtx(), build_def_use(), can_compare_and_swap_p(), can_reload_into(), canonicalize_values_star(), ceil_sdiv_adjust(), check_simple_exit(), collect_one_action_chain(), compute_store_table(), cond_exec_get_condition(), contains_replace_regs(), convert_debug_memory_address(), cse_prescan_path(), cselib_record_sets(), dead_debug_add(), decide_peel_simple(), decl_for_component_ref(), decompose_register(), delete_insn(), determine_common_wider_type(), df_simulate_initialize_backwards(), do_jump_by_parts_zero_rtx(), drop_overlapping_mem_locs(), dw2_output_call_site_table(), emit_add2_insn(), emit_cstore(), emit_debug_insn_after_noloc(), emit_group_load_1(), emit_jump_insn_before(), emit_note_before(), expand_abs(), expand_atomic_compare_and_swap(), expand_atomic_load(), expand_binop(), expand_binop_directly(), expand_builtin_atomic_exchange(), expand_builtin_bswap(), expand_builtin_bzero(), expand_builtin_expect(), expand_builtin_memcpy(), expand_builtin_memset_args(), expand_builtin_prefetch(), expand_builtin_sync_operation(), expand_builtin_va_end(), expand_builtin_va_start(), expand_doubleword_clz(), expand_expr_real_1(), expand_float(), expand_mem_thread_fence(), expand_return(), extract_high_half(), extract_split_bit_field(), find_comparison_args(), find_costs_and_classes(), find_moveable_store(), find_partition_fixes(), find_shift_sequence(), fixup_new_cold_bb(), fold_rtx(), for_each_eh_label(), force_move_args_size_note(), free_for_value_p(), gen_formal_parameter_die(), get_eh_region_from_rtx(), get_last_bb_insn(), get_stored_val(), have_sub2_insn(), in_list_p(), init_block_clear_fn(), init_recog(), init_varasm_status(), initializer_constant_valid_p_1(), insert_insn_end_basic_block(), insert_restore(), ira_reassign_pseudos(), jumpifnot_1(), loc_descriptor_from_tree(), lra_get_insn_regs(), make_pass_into_cfg_layout_mode(), maybe_emit_sync_lock_test_and_set(), mode_for_array(), move2add_use_add3_insn(), move2add_valid_value_p(), multiple_sets(), noce_can_store_speculate_p(), noce_emit_store_flag(), node_has_low_bound(), note_outside_basic_block_p(), one_code_hoisting_pass(), operands_match_p(), output_asm_insn(), previous_insn(), print_exp(), process_alt_operands(), process_bb_node_lives(), process_reg_shuffles(), push_block(), queue_reg_save(), read_mapping(), record_biv(), reemit_insn_block_notes(), reg_used_on_edge(), reload_combine_closest_single_use(), reload_combine_recognize_const_pattern(), remove_child_TAG(), remove_unreachable_eh_regions_worker(), replace_reg_with_saved_mem(), replace_rtx(), resolve_clobber(), resolve_subreg_use(), reverse_op(), reversed_condition(), rewrite_trees(), rtx_addr_varies_p(), rtx_equal_for_memref_p(), rtx_for_function_call(), scompare_loc_descriptor(), set_initial_eh_label_offset(), set_insn_deleted(), set_label_offsets(), sjlj_assign_call_site_values(), split_all_insns(), split_edge_and_insert(), stabilize_va_list_loc(), tablejump_p(), tree_conflicts_with_clobbers_p(), try_back_substitute_reg(), uses_hard_regs_p(), and val_bind().
#define NUM_RTX_CODE ((int) LAST_AND_UNUSED_RTX_CODE) |
The cast here saves many elsewhere.
#define OBJECT_P | ( | X | ) | ((GET_RTX_CLASS (GET_CODE (X)) & RTX_OBJ_MASK) == RTX_OBJ_RESULT) |
1 if X can be used to represent an object.
Referenced by for_each_rtx(), make_extraction(), and noce_emit_store_flag().
#define ORIGINAL_REGNO | ( | RTX | ) | X0UINT (RTX, 1) |
ORIGINAL_REGNO holds the number the register originally had; for a pseudo register turned into a hard reg this will hold the old pseudo register number.
Referenced by find_oldest_value_reg(), process_alt_operands(), and variable_part_different_p().
#define PAT_VAR_LOCATION_DECL | ( | PAT | ) | (XCTREE ((PAT), 0, VAR_LOCATION)) |
Variable declaration and the location of a variable.
#define PAT_VAR_LOCATION_LOC | ( | PAT | ) | (XCEXP ((PAT), 1, VAR_LOCATION)) |
#define PAT_VAR_LOCATION_STATUS | ( | PAT | ) | ((enum var_init_status) (XCINT ((PAT), 2, VAR_LOCATION))) |
Initialization status of the variable in the location. Status can be unknown, uninitialized or initialized. See enumeration type below.
#define PATTERN | ( | INSN | ) | XEXP (INSN, 4) |
The body of an insn.
Referenced by adjust_mem_uses(), alter_reg(), canonicalize_change_group(), cheap_bb_rtx_cost_p(), check_eliminable_occurrences(), clobber_return_register(), collect_one_action_chain(), cond_exec_find_if_block(), cse_prescan_path(), dead_or_set_regno_p(), decls_for_scope(), decrease_live_ranges_number(), df_simulate_one_insn_forwards(), diagnostic_for_asm(), eh_returnjump_p_1(), emit_call_insn_after_setloc(), emit_insn_at_entry(), emit_move_insn(), expand_copysign(), expand_copysign_bit(), find_dead_or_set_registers(), find_implicit_sets(), fprint_whex(), gen_const_vector(), get_eh_region_and_lp_from_rtx(), get_last_value_validate(), get_stored_val(), gmalloc(), hash_scan_set(), init_elim_table(), init_label_info(), insert_insn_end_basic_block(), insert_var_expansion_initialization(), insn_invalid_p(), lra_set_insn_deleted(), mark_label_nuses(), mark_used_regs_combine(), maybe_fix_stack_asms(), memref_used_between_p(), move2add_valid_value_p(), multiple_sets(), next_real_insn(), notice_stack_pointer_modification(), one_code_hoisting_pass(), prev_nonnote_nondebug_insn(), previous_insn(), process_bb_node_lives(), reg_killed_on_edge(), reload_combine_recognize_const_pattern(), reload_cse_move2add(), remove_pseudos(), remove_unreachable_eh_regions(), resolve_clobber(), rtx_addr_varies_p(), save_call_clobbered_regs(), set_nonzero_bits_and_sign_copies(), simplejump_p(), single_set_2(), spill_hard_reg(), sprint_ul(), store_killed_in_pat(), try_back_substitute_reg(), try_combine(), update_cfg_for_uncondjump(), use_crosses_set_p(), vt_add_function_parameter(), and vt_get_decl_and_offset().
#define pic_offset_table_rtx (this_target_rtl->x_pic_offset_table_rtx) |
Referenced by fprint_ul(), rtx_equal_for_memref_p(), rtx_unstable_p(), rtx_varies_p(), and set_usage_bits().
#define PREFETCH_SCHEDULE_BARRIER_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("PREFETCH_SCHEDULE_BARRIER_P", (RTX), PREFETCH)->volatil) |
True if RTX is flagged to be a scheduling barrier.
#define PREV_INSN | ( | INSN | ) | XEXP (INSN, 1) |
Chain insns together in sequence.
Referenced by bb_note(), block_label(), cfg_layout_can_merge_blocks_p(), cfg_layout_split_block(), debug(), delete_insn_and_edges(), df_simulate_fixup_sets(), expand_copysign(), get_last_insn_anywhere(), next_insn(), noce_can_store_speculate_p(), prev_nondebug_insn(), record_insns(), rtl_split_block(), rtl_split_edge(), set_insn_locations(), set_nonzero_bits_and_sign_copies(), subst_reloads(), try_crossjump_to_edge(), and use_crosses_set_p().
#define PUT_CODE | ( | RTX, | |
CODE | |||
) | ((RTX)->code = (CODE)) |
Referenced by delete_insn(), find_mode(), notice_source_line(), and read_mapping().
#define PUT_MODE | ( | RTX, | |
MODE | |||
) | ((RTX)->mode = (MODE)) |
#define PUT_NUM_ELEM | ( | RTVEC, | |
NUM | |||
) | ((RTVEC)->num_elem = (NUM)) |
#define PUT_REG_NOTE_KIND | ( | LINK, | |
KIND | |||
) | PUT_MODE (LINK, (enum machine_mode) (KIND)) |
Referenced by find_constant_src(), and get_last_value().
#define REG_ATTRS | ( | RTX | ) | X0REGATTR (RTX, 2) |
The register attribute block. We provide access macros for each value in the block and provide defaults if none specified.
Referenced by can_decompose_p(), entry_register(), find_oldest_value_reg(), and set_reg_attrs_from_value().
For a REG rtx, the decl it is known to refer to, if it is known to refer to part of a DECL.
Referenced by shared_hash_find().
#define REG_FUNCTION_VALUE_P | ( | RTX | ) | (RTL_FLAG_CHECK2 ("REG_FUNCTION_VALUE_P", (RTX), REG, PARALLEL)->return_val) |
1 if RTX is a reg or parallel that is the current function's return value.
Referenced by assign_parms_unsplit_complex().
Define macros to extract and insert the reg-note kind in an EXPR_LIST.
Referenced by adjust_decomposed_uses(), alter_reg(), covers_regno_no_parallel_p(), covers_regno_p(), dead_or_set_regno_p(), dump_prediction(), init_label_info(), memref_referenced_p(), next_active_insn(), and one_code_hoisting_pass().
#define REG_NOTES | ( | INSN | ) | XEXP(INSN, 7) |
Holds a list of notes on what this insn does to various REGs. It is a chain of EXPR_LIST rtx's, where the second operand is the chain pointer and the first operand is the REG being described. The mode field of the EXPR_LIST contains not a real machine mode but a value from enum reg_note.
Referenced by alter_reg(), covers_regno_no_parallel_p(), covers_regno_p(), dead_or_set_regno_p(), df_note_bb_compute(), df_remove_dead_eq_notes(), dump_prediction(), init_label_info(), mark_label_nuses(), maybe_fix_stack_asms(), memref_referenced_p(), and try_back_substitute_reg().
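A sketch of walking this chain by hand (find_reg_note in rtlanal.c provides the same service): the chain pointer is operand 1 of each EXPR_LIST and the note kind sits in the mode field, read back with REG_NOTE_KIND.

    /* Sketch: return the first REG_DEAD note attached to INSN, if any.  */
    static rtx
    first_dead_note (rtx insn)
    {
      rtx link;
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == REG_DEAD)
          return link;
      return NULL_RTX;
    }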
For a REG rtx, the offset from the start of REG_EXPR, if known, as an HOST_WIDE_INT.
Referenced by shared_hash_find().
#define REG_P | ( | X | ) | (GET_CODE (X) == REG) |
Predicate yielding nonzero iff X is an rtx for a register.
Referenced by add_mem_for_addr(), add_stores(), address_operand(), analyze_insn_to_expand_var(), anti_adjust_stack_and_probe(), assign_parms_unsplit_complex(), bb_has_abnormal_call_pred(), build_def_use(), can_decompose_p(), can_reload_into(), canon_condition(), ceiling(), compute_argument_block_size(), compute_const_anchors(), compute_local_properties(), copy_rtx_if_shared_1(), count_reg_usage(), count_type_elements(), covers_regno_p(), cselib_preserved_value_p(), cselib_reg_set_mode(), dataflow_set_copy(), dataflow_set_init(), dead_debug_global_insert(), dead_debug_insert_temp(), deallocate_reload_reg(), df_bb_regno_first_def_find(), do_output_reload(), dump_insn_info(), dwarf2out_flush_queued_reg_saves(), dwarf2out_frame_debug_cfa_window_save(), emit_group_load_1(), emit_local(), emit_move_change_mode(), expand_builtin_memset_args(), expand_reg_info(), find_call_stack_args(), find_dummy_reload(), find_invariants_to_move(), find_reg_equal_equiv_note(), find_reg_note(), find_reusable_reload(), find_valid_class_1(), find_variable_location_part(), fprint_ul(), free_csa_reflist(), free_for_value_p(), free_loop_data(), gen_formal_parameter_die(), gen_insn(), gen_reg_rtx_offset(), get_final_hard_regno(), have_sub2_insn(), implicit_set_cond_p(), in_list_p(), init_dummy_function_start(), init_num_sign_bit_copies_in_rep(), init_reg_last(), insert_insn_end_basic_block(), ira_reassign_pseudos(), iv_analysis_done(), locate_and_pad_parm(), lra_set_insn_deleted(), make_memloc(), mark_altered(), mark_insn(), match_reload(), maybe_fix_stack_asms(), move2add_use_add3_insn(), noce_emit_store_flag(), note_uses(), ok_for_base_p_nonstrict(), one_code_hoisting_pass(), operands_match_p(), output_asm_insn(), process_alt_operands(), process_bb_node_lives(), process_reg_shuffles(), record_jump_cond_subreg(), record_last_set_info(), record_promoted_value(), record_reg_classes(), refs_newer_value_cb(), reg_overlap_mentioned_p(), reload_as_needed(), reload_combine_closest_single_use(), remove_from_table(), remove_useless_values(), remove_value_from_changed_variables(), reverse_op(), rtx_equal_for_memref_p(), set_dv_changed(), set_label_offsets(), set_nonzero_bits_and_sign_copies(), set_reg_attrs_from_value(), simplify_relational_operation_1(), split_double(), subst_reloads(), target_canonicalize_comparison(), try_replace_reg(), unique_base_value(), uses_hard_regs_p(), variable_part_different_p(), and write_header().
#define REG_POINTER | ( | RTX | ) | (RTL_FLAG_CHECK1 ("REG_POINTER", (RTX), REG)->frame_related) |
1 if RTX is a reg that holds a pointer value.
Referenced by convert_memory_address_addr_space(), entry_register(), find_oldest_value_reg(), rtx_equal_for_memref_p(), set_reg_attrs_for_decl_rtl(), split_double(), and unique_base_value().
#define REG_USERVAR_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("REG_USERVAR_P", (RTX), REG)->volatil) |
1 if RTX is a reg that corresponds to a variable declared by the user.
Referenced by entry_register().
#define REGNO | ( | RTX | ) | (rhs_regno(RTX)) |
For a REG rtx, REGNO extracts the register number. REGNO can only be used on RHS. Use SET_REGNO to change the value.
Referenced by add_mem_for_addr(), address_operand(), assign_parm_adjust_entry_rtl(), assign_parm_setup_reg(), assign_parms_unsplit_complex(), assign_stack_slot_num_and_sort_pseudos(), bb_has_abnormal_call_pred(), build_def_use(), can_reload_into(), canonicalize_values_star(), check_and_process_move(), compute_hash_table(), constrain_operands(), copy_rtx_if_shared_1(), count_reg_usage(), covers_regno_p(), dataflow_set_init(), dead_debug_global_insert(), deallocate_reload_reg(), delete_trivially_dead_insns(), df_bb_regno_first_def_find(), df_word_lr_top_dump(), do_output_reload(), dump_insn_info(), dump_var(), dwarf2out_frame_debug_cfa_window_save(), emit_move_change_mode(), emit_push_insn(), entry_register(), equiv_init_varies_p(), expand_builtin_memset_args(), find_implicit_sets(), find_invariants_to_move(), find_reg_equal_equiv_note(), find_reg_note(), find_variable_location_part(), fprint_ul(), gen_formal_parameter_die(), gen_insn(), get_biv_step(), get_last_value(), get_reg_known_value(), have_sub2_insn(), implicit_set_cond_p(), in_list_p(), init_dummy_function_start(), init_reg_last(), insert_insn_end_basic_block(), insert_move_for_subreg(), insert_save(), insn_live_p(), insn_sets_btr_p(), invert_exp_1(), ira_reassign_pseudos(), kill_autoinc_value(), kill_set_value(), locate_and_pad_parm(), lookup_as_function(), make_memloc(), mark_altered(), mark_pseudo_regno_subword_dead(), match_reload(), maybe_fix_stack_asms(), mem_overlaps_already_clobbered_arg_p(), modify_move_list(), move2add_use_add3_insn(), move2add_valid_value_p(), move_unallocated_pseudos(), new_btr_user(), note_uses(), one_pre_gcse_pass(), operands_match_p(), output_asm_insn(), print_value(), process_alt_operands(), process_bb_node_lives(), queue_reg_save(), record_jump_cond_subreg(), record_promoted_value(), record_reg_classes(), reg_available_p(), reg_overlap_mentioned_p(), reload_as_needed(), reload_combine_note_store(), reload_combine_recognize_const_pattern(), remove_from_table(), resolve_reg_notes(), rtx_equal_p_cb(), save_call_clobbered_regs(), scan_paradoxical_subregs(), scompare_loc_descriptor(), set_dest_equal_p(), set_dv_changed(), set_label_offsets(), set_nonzero_bits_and_sign_copies(), set_of_1(), set_reg_attrs_for_decl_rtl(), split_double(), subst_reloads(), try_replace_reg(), val_bind(), and validate_subreg().
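A sketch distinguishing hard from pseudo registers, assuming the target macro FIRST_PSEUDO_REGISTER from tm.h (rtl.h also provides HARD_REGISTER_P for the same test); REGNO is read-only, so changes go through SET_REGNO.

    /* Sketch: true if X is a hard (machine) register.  */
    static bool
    hard_reg_rtx_p (rtx x)
    {
      return REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER;
    }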
#define REGNO_PTR_FRAME_P | ( | REGNUM | ) |
Nonzero if REGNUM is a pointer into the stack frame.
#define return_address_pointer_rtx (this_target_rtl->x_return_address_pointer_rtx) |
General accessor macros for accessing the fields of an rtx.
#define RTL_CONST_CALL_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("RTL_CONST_CALL_P", (RTX), CALL_INSN)->unchanging) |
1 if RTX is a call to a const function. Built from ECF_CONST and TREE_READONLY.
Referenced by gen_const_vector(), and store_killed_in_pat().
#define RTL_CONST_OR_PURE_CALL_P | ( | RTX | ) | (RTL_CONST_CALL_P (RTX) || RTL_PURE_CALL_P (RTX)) |
1 if RTX is a call to a const or pure function.
Referenced by merge_identical_invariants().
#define RTL_FLAG_CHECK1 | ( | NAME, | |
RTX, | |||
C1 | |||
) | (RTX) |
#define RTL_FLAG_CHECK2 | ( | NAME, | |
RTX, | |||
C1, | |||
C2 | |||
) | (RTX) |
#define RTL_FLAG_CHECK3 | ( | NAME, | |
RTX, | |||
C1, | |||
C2, | |||
C3 | |||
) | (RTX) |
#define RTL_FLAG_CHECK4 | ( | NAME, | |
RTX, | |||
C1, | |||
C2, | |||
C3, | |||
C4 | |||
) | (RTX) |
#define RTL_FLAG_CHECK5 | ( | NAME, | |
RTX, | |||
C1, | |||
C2, | |||
C3, | |||
C4, | |||
C5 | |||
) | (RTX) |
#define RTL_FLAG_CHECK6 | ( | NAME, | |
RTX, | |||
C1, | |||
C2, | |||
C3, | |||
C4, | |||
C5, | |||
C6 | |||
) | (RTX) |
#define RTL_FLAG_CHECK7 | ( | NAME, | |
RTX, | |||
C1, | |||
C2, | |||
C3, | |||
C4, | |||
C5, | |||
C6, | |||
C7 | |||
) | (RTX) |
#define RTL_FLAG_CHECK8 | ( | NAME, | |
RTX, | |||
C1, | |||
C2, | |||
C3, | |||
C4, | |||
C5, | |||
C6, | |||
C7, | |||
C8 | |||
) | (RTX) |
#define RTL_LOCATION | ( | X | ) |
LOCATION of an RTX if relevant.
#define RTL_LOOPING_CONST_OR_PURE_CALL_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("CONST_OR_PURE_CALL_P", (RTX), CALL_INSN)->call) |
1 if RTX is a call to a looping const or pure function. Built from ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P.
Referenced by gen_const_vector(), and merge_identical_invariants().
#define RTL_PURE_CALL_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("RTL_PURE_CALL_P", (RTX), CALL_INSN)->return_val) |
1 if RTX is a call to a pure function. Built from ECF_PURE and DECL_PURE_P.
Referenced by gen_const_vector().
Referenced by change_subst_attribute().
#define rtx_alloc | ( | c | ) | rtx_alloc_stat (c MEM_STAT_INFO) |
#define RTX_ARITHMETIC_MASK (~1) |
#define RTX_ARITHMETIC_RESULT (RTX_COMM_ARITH & RTX_ARITHMETIC_MASK) |
#define RTX_BINARY_MASK (~3) |
#define RTX_BINARY_RESULT (RTX_COMPARE & RTX_BINARY_MASK) |
#define RTX_CODE enum rtx_code |
Register Transfer Language EXPRESSIONS CODES
Referenced by check_code_iterator(), cmp_c_test(), emit_clobber(), gen_insn(), new_decision(), ok_for_base_p_nonstrict(), remove_note(), set_dv_changed(), and shared_const_p().
#define RTX_CODE_SIZE | ( | CODE | ) | rtx_code_size[CODE] |
The size in bytes of an rtx with code CODE.
Referenced by shallow_copy_rtvec().
#define RTX_COMMUTATIVE_MASK (~2) |
#define RTX_COMMUTATIVE_RESULT (RTX_COMM_COMPARE & RTX_COMMUTATIVE_MASK) |
#define RTX_COMPARE_MASK (~1) |
#define RTX_COMPARE_RESULT (RTX_COMPARE & RTX_COMPARE_MASK) |
#define RTX_FLAG | ( | RTX, | |
FLAG | |||
) | ((RTX)->FLAG) |
General accessor macros for accessing the flags of an rtx. Access an individual rtx flag, with no checking of any kind.
Referenced by set_unique_reg_note().
#define RTX_FRAME_RELATED_P | ( | RTX | ) |
Referenced by cselib_record_sets(), dwarf2out_frame_debug_cfa_window_save(), and prev_nondebug_insn().
The size in bytes of an rtx header (code, mode and flags).
Referenced by shallow_copy_rtvec(), and use_object_blocks_p().
#define RTX_NEXT | ( | X | ) |
#define RTX_NON_COMMUTATIVE_RESULT (RTX_COMPARE & RTX_COMMUTATIVE_MASK) |
#define RTX_OBJ_MASK (~1) |
#define RTX_OBJ_RESULT (RTX_OBJ & RTX_OBJ_MASK) |
#define RTX_PREV | ( | X | ) |
#define SCHED_GROUP_P | ( | RTX | ) |
During sched, 1 if RTX is an insn that must be scheduled together with the preceding insn.
For a SET rtx, SET_DEST is the place that is set and SET_SRC is the value it is set to.
Referenced by add_equal_note(), alter_reg(), analyze_insn_to_expand_var(), asm_noperands(), assign_parm_setup_reg(), bb_has_abnormal_call_pred(), change_subst_attribute(), cheap_bb_rtx_cost_p(), check_eliminable_occurrences(), combine_set_extension(), copy_replacements_1(), count_reg_usage(), cse_prescan_path(), cselib_record_set(), dwarf2out_frame_debug_cfa_window_save(), emit_move_insn(), equiv_init_varies_p(), expand_copysign_bit(), extract_asm_operands(), find_all_hard_reg_sets(), find_invariants_to_move(), find_loads(), gen_label_rtx(), gen_mnemonic_setattr(), insert_insn_end_basic_block(), lra_set_insn_deleted(), maybe_fix_stack_asms(), maybe_propagate_label_ref(), move2add_use_add3_insn(), note_stores(), one_code_hoisting_pass(), preserve_value(), process_bb_node_lives(), process_reg_shuffles(), record_jump_cond_subreg(), reg_killed_on_edge(), reg_overlap_mentioned_p(), reload_combine_closest_single_use(), remove_reg_equal_offset_note(), remove_value_from_changed_variables(), returnjump_p_1(), save_call_clobbered_regs(), set_nonzero_bits_and_sign_copies(), simplejump_p(), store_killed_before(), target_canonicalize_comparison(), try_back_substitute_reg(), try_combine(), unroll_loop_stupid(), and update_cfg_for_uncondjump().
#define SET_INSN_DELETED | ( | INSN | ) | set_insn_deleted (INSN); |
Referenced by lra_invalidate_insn_data().
#define SET_IS_RETURN_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("SET_IS_RETURN_P", (RTX), SET)->jump) |
Referenced by any_condjump_p().
#define SET_LABEL_KIND | ( | LABEL, | |
KIND | |||
) |
#define SET_REGNO | ( | RTX, | |
N | |||
) | (df_ref_change_reg_with_loc (REGNO (RTX), N, RTX), XCUINT (RTX, 0, REG) = N) |
Referenced by add_equal_note(), adjust_for_new_dest(), alloc_use_cost_map(), alter_reg(), asm_noperands(), change_subst_attribute(), check_eliminable_occurrences(), compress_float_constant(), copy_replacements_1(), count_reg_usage(), cse_prescan_path(), cselib_record_set(), diagnostic_for_asm(), dwarf2out_frame_debug_cfa_window_save(), emit_move_insn(), equiv_init_varies_p(), expand_copysign_bit(), extract_asm_operands(), find_call_stack_args(), find_defs(), gen_label_rtx(), in_list_p(), insert_insn_end_basic_block(), maybe_fix_stack_asms(), memory_operand(), move2add_use_add3_insn(), move2add_valid_value_p(), note_stores(), one_code_hoisting_pass(), ordered_comparison_operator(), preserve_value(), process_reg_shuffles(), record_jump_cond_subreg(), reload_combine_closest_single_use(), remove_reg_equal_offset_note(), remove_value_from_changed_variables(), replace_dead_reg(), rtx_addr_varies_p(), simplify_if_then_else(), try_back_substitute_reg(), try_combine(), undo_commit(), unroll_loop_stupid(), and update_cfg_for_uncondjump().
#define SET_SYMBOL_REF_CONSTANT | ( | RTX, | |
C | |||
) | (gcc_assert (CONSTANT_POOL_ADDRESS_P (RTX)), X0CONSTANT ((RTX), 2) = (C)) |
Set RTX's SYMBOL_REF_CONSTANT to C. RTX must be a constant pool symbol.
#define SET_SYMBOL_REF_DECL | ( | RTX, | |
DECL | |||
) | (gcc_assert (!CONSTANT_POOL_ADDRESS_P (RTX)), X0TREE ((RTX), 2) = (DECL)) |
Set RTX's SYMBOL_REF_DECL to DECL. RTX must not be a constant pool symbol.
#define shallow_copy_rtx | ( | a | ) | shallow_copy_rtx_stat (a MEM_STAT_INFO) |
Referenced by convert_memory_address_addr_space(), and move_by_pieces_1().
#define SIBLING_CALL_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("SIBLING_CALL_P", (RTX), CALL_INSN)->jump) |
1 if RTX is a call_insn for a sibling call.
Referenced by can_throw_external(), do_warn_unused_parameter(), fprint_whex(), gen_const_vector(), and merge_dir().
#define single_set | ( | I | ) |
Functions in rtlanal.c. Single set is implemented as a macro for performance reasons.
Referenced by add_equal_note(), adjust_for_new_dest(), alloc_use_cost_map(), analyze_insn_to_expand_var(), assign_parm_setup_reg(), cheap_bb_rtx_cost_p(), copy_replacements_1(), count_reg_usage(), emit_move_insn(), find_call_stack_args(), find_invariants_to_move(), get_loop_level(), in_list_p(), may_assign_reg_p(), maybe_fix_stack_asms(), maybe_propagate_label_ref(), memref_referenced_p(), move2add_use_add3_insn(), process_reg_shuffles(), reload_combine_closest_single_use(), returnjump_p_1(), store_killed_before(), union_defs(), and unroll_loop_stupid().
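A sketch of the most common single_set idiom, using only macros from this header: when the insn's effective pattern is one SET, SET_DEST and SET_SRC pull out its two sides.

    /* Sketch: true if INSN merely copies one register into another.  */
    static bool
    simple_reg_copy_p (rtx insn)
    {
      rtx set = single_set (insn);
      return (set != NULL_RTX
              && REG_P (SET_DEST (set))
              && REG_P (SET_SRC (set)));
    }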
#define single_set_1 | ( | I | ) | single_set_2 (I, PATTERN (I)) |
#define stack_pointer_rtx (global_rtl[GR_STACK_POINTER]) |
All references to certain hard regs, except those created by allocating pseudo regs into them (when that's possible), go through these unique rtx objects.
Referenced by calculate_bb_reg_pressure(), compress_float_constant(), df_simulate_uses(), emit_library_call_value_1(), emit_move_insn(), find_call_stack_args(), free_csa_reflist(), notice_stack_pointer_modification_1(), push_block(), subst_reloads(), and volatile_refs_p().
#define SUBREG_BYTE | ( | RTX | ) | XCUINT (RTX, 1, SUBREG) |
Referenced by invert_exp_1(), make_extraction(), match_reload(), print_value(), resolve_subreg_use(), setup_reg_equiv(), and validate_simplify_insn().
#define SUBREG_PROMOTED_UNSIGNED_P | ( | RTX | ) |
Valid for subregs which are SUBREG_PROMOTED_VAR_P(). In that case this gives the necessary extensions: 0 = signed, 1 = normal unsigned, -1 = pointer unsigned, which most often can be handled like unsigned extension, except for generating instructions where we need to emit special code (ptr_extend insns) on some architectures.
#define SUBREG_PROMOTED_UNSIGNED_SET | ( | RTX, | |
VAL | |||
) |
Referenced by compute_argument_block_size().
#define SUBREG_PROMOTED_VAR_P | ( | RTX | ) | (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED", (RTX), SUBREG)->in_struct) |
1 if RTX is a subreg containing a reg that is already known to be sign- or zero-extended from the mode of the subreg to the mode of the reg. SUBREG_PROMOTED_UNSIGNED_P gives the signedness of the extension.
When used as a LHS, it means that this extension must be done when assigning to SUBREG_REG.
Referenced by compute_argument_block_size(), and convert_memory_address_addr_space().
#define SUBREG_REG | ( | RTX | ) | XCEXP (RTX, 0, SUBREG) |
For a SUBREG rtx, SUBREG_REG extracts the value we want a subreg of. SUBREG_BYTE extracts the byte-number.
Referenced by check_iv_ref_table_size(), convert_memory_address_addr_space(), copy_rtx_if_shared_1(), df_bb_regno_first_def_find(), distribute_and_simplify_rtx(), find_invariants_to_move(), find_single_use(), for_each_rtx(), free_loop_data(), get_biv_step_1(), get_final_hard_regno(), insert_move_for_subreg(), invert_exp_1(), invert_jump_1(), make_extraction(), mark_pseudo_reg_dead(), match_reload(), ok_for_base_p_nonstrict(), operands_match_p(), print_value(), process_bb_node_lives(), process_reg_shuffles(), push_secondary_reload(), reg_class_from_constraints(), reg_overlap_mentioned_p(), reload_as_needed(), replace_oldest_value_addr(), resolve_subreg_use(), scompare_loc_descriptor(), set_reg_attrs_from_value(), simplify_set(), uses_hard_regs_p(), valid_address_p(), validate_simplify_insn(), and vt_stack_adjustments().
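A sketch of looking through one level of SUBREG with the accessors above (hypothetical helper); SUBREG_BYTE is the byte offset into the inner value.

    /* Sketch: strip one SUBREG level and report the byte offset.  */
    static rtx
    strip_subreg (rtx x, unsigned int *byte)
    {
      *byte = 0;
      if (GET_CODE (x) == SUBREG)
        {
          *byte = SUBREG_BYTE (x);
          x = SUBREG_REG (x);
        }
      return x;
    }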
#define SWAPPABLE_OPERANDS_P | ( | X | ) |
1 if X is a commutative arithmetic operator or a comparison operator. These two are sometimes selected together because it is possible to swap the two operands.
Referenced by num_validated_changes().
#define SYMBOL_FLAG_ANCHOR (1 << 8) |
Set if this symbol is a section anchor. SYMBOL_REF_ANCHOR_P implies SYMBOL_REF_HAS_BLOCK_INFO_P.
#define SYMBOL_FLAG_EXTERNAL (1 << 6) |
Set if this symbol is not defined in this translation unit.
#define SYMBOL_FLAG_FUNCTION (1 << 0) |
These flags are common enough to be defined for all targets. They are computed by the default version of targetm.encode_section_info. Set if this symbol is a function.
#define SYMBOL_FLAG_HAS_BLOCK_INFO (1 << 7) |
Set if this symbol has a block_symbol structure associated with it.
#define SYMBOL_FLAG_LOCAL (1 << 1) |
Set if targetm.binds_local_p is true.
#define SYMBOL_FLAG_MACH_DEP (1 << SYMBOL_FLAG_MACH_DEP_SHIFT) |
#define SYMBOL_FLAG_MACH_DEP_SHIFT 9 |
Subsequent bits are available for the target to use.
#define SYMBOL_FLAG_SMALL (1 << 2) |
Set if targetm.in_small_data_p is true.
#define SYMBOL_FLAG_TLS_SHIFT 3 |
The three-bit field at [5:3] is nonzero for TLS variables; use SYMBOL_REF_TLS_MODEL to extract the field as an enum tls_model.
#define SYMBOL_REF_ANCHOR_P | ( | RTX | ) | ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_ANCHOR) != 0) |
#define SYMBOL_REF_BLOCK | ( | RTX | ) | (BLOCK_SYMBOL_CHECK (RTX)->block) |
If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the object_block structure to which the symbol belongs, or NULL if it has not been assigned a block.
Referenced by default_select_rtx_section(), and get_variable_section().
#define SYMBOL_REF_BLOCK_OFFSET | ( | RTX | ) | (BLOCK_SYMBOL_CHECK (RTX)->offset) |
If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the offset of RTX from the first object in SYMBOL_REF_BLOCK (RTX). The value is negative if RTX has not yet been assigned to a block, or it has not been given an offset within that block.
Referenced by default_binds_local_p(), and get_variable_section().
#define SYMBOL_REF_CONSTANT | ( | RTX | ) | (CONSTANT_POOL_ADDRESS_P (RTX) ? X0CONSTANT ((RTX), 2) : NULL) |
The rtx constant pool entry for a symbol, or null.
Referenced by default_elf_select_rtx_section().
#define SYMBOL_REF_DATA | ( | RTX | ) | X0ANY ((RTX), 2) |
A pointer attached to the SYMBOL_REF; either SYMBOL_REF_DECL or SYMBOL_REF_CONSTANT.
#define SYMBOL_REF_DECL | ( | RTX | ) | (CONSTANT_POOL_ADDRESS_P (RTX) ? NULL : X0TREE ((RTX), 2)) |
The tree (decl or constant) associated with the symbol, or null.
Referenced by dwarf2out_init(), macinfo_entry_hasher::equal(), force_reg(), get_integer_term(), init_block_clear_fn(), member_declared_type(), old_insns_match_p(), and reverse_op().
#define SYMBOL_REF_EXTERNAL_P | ( | RTX | ) | ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_EXTERNAL) != 0) |
#define SYMBOL_REF_FLAG | ( | RTX | ) | (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAG", (RTX), SYMBOL_REF)->volatil) |
Used if RTX is a symbol_ref, for machine-specific purposes.
#define SYMBOL_REF_FLAGS | ( | RTX | ) | X0INT ((RTX), 1) |
A set of flags on a symbol_ref that are, in some respects, redundant with information derivable from the tree decl associated with this symbol. Except that we build a lot of SYMBOL_REFs that aren't associated with a decl. In some cases this is a bug. But beyond that, it's nice to cache this information to avoid recomputing it. Finally, this allows space for the target to store more than one bit of information, as with SYMBOL_REF_FLAG.
#define SYMBOL_REF_FUNCTION_P | ( | RTX | ) | ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_FUNCTION) != 0) |
#define SYMBOL_REF_HAS_BLOCK_INFO_P | ( | RTX | ) | ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_HAS_BLOCK_INFO) != 0) |
Referenced by shallow_copy_rtvec().
#define SYMBOL_REF_LOCAL_P | ( | RTX | ) | ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_LOCAL) != 0) |
Referenced by default_section_type_flags().
#define SYMBOL_REF_SMALL_P | ( | RTX | ) | ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_SMALL) != 0) |
#define SYMBOL_REF_TLS_MODEL | ( | RTX | ) | ((enum tls_model) ((SYMBOL_REF_FLAGS (RTX) >> SYMBOL_FLAG_TLS_SHIFT) & 7)) |
#define SYMBOL_REF_USED | ( | RTX | ) | (RTL_FLAG_CHECK1 ("SYMBOL_REF_USED", (RTX), SYMBOL_REF)->used) |
1 if RTX is a symbol_ref that has been used as the library function in emit_library_call.
Referenced by assemble_vtv_preinit_initializer().
#define SYMBOL_REF_WEAK | ( | RTX | ) | (RTL_FLAG_CHECK1 ("SYMBOL_REF_WEAK", (RTX), SYMBOL_REF)->return_val) |
1 if RTX is a symbol_ref for a weak symbol.
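A sketch reading several SYMBOL_REF flag accessors together (hypothetical classifier); the TLS model occupies bits [5:3] of SYMBOL_REF_FLAGS and comes back as an enum tls_model.

    /* Sketch: classify a SYMBOL_REF for dumping purposes.  */
    static const char *
    classify_symbol (rtx sym)
    {
      gcc_assert (GET_CODE (sym) == SYMBOL_REF);
      if (SYMBOL_REF_TLS_MODEL (sym) != TLS_MODEL_NONE)
        return "tls";
      if (SYMBOL_REF_FUNCTION_P (sym))
        return "function";
      return SYMBOL_REF_LOCAL_P (sym) ? "local data" : "global data";
    }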
#define this_target_rtl (&default_target_rtl) |
#define top_of_stack (this_target_rtl->x_top_of_stack) |
#define TRAP_CODE | ( | RTX | ) | XCEXP (RTX, 1, TRAP_IF) |
Referenced by cond_exec_find_if_block().
#define TRAP_CONDITION | ( | RTX | ) | XCEXP (RTX, 0, TRAP_IF) |
For a TRAP_IF rtx, TRAP_CONDITION is an expression.
#define TREE_CONSTANT_POOL_ADDRESS_P | ( | RTX | ) |
1 if RTX is a symbol_ref that addresses a value in the file's tree constant pool. This information is private to varasm.c.
#define UINTVAL | ( | RTX | ) | ((unsigned HOST_WIDE_INT) INTVAL (RTX)) |
Referenced by make_extraction(), and simplify_and_const_int_1().
#define UNARY_P | ( | X | ) | (GET_RTX_CLASS (GET_CODE (X)) == RTX_UNARY) |
1 if X is a unary operator.
#define USE_LOAD_POST_DECREMENT | ( | MODE | ) | HAVE_POST_DECREMENT |
#define USE_LOAD_POST_INCREMENT | ( | MODE | ) | HAVE_POST_INCREMENT |
Some architectures do not have complete pre/post increment/decrement instruction sets, or only move some modes efficiently. These macros allow us to tune autoincrement generation.
#define USE_LOAD_PRE_DECREMENT | ( | MODE | ) | HAVE_PRE_DECREMENT |
Referenced by can_move_by_pieces().
#define USE_LOAD_PRE_INCREMENT | ( | MODE | ) | HAVE_PRE_INCREMENT |
#define USE_STORE_POST_DECREMENT | ( | MODE | ) | HAVE_POST_DECREMENT |
#define USE_STORE_POST_INCREMENT | ( | MODE | ) | HAVE_POST_INCREMENT |
#define USE_STORE_PRE_DECREMENT | ( | MODE | ) | HAVE_PRE_DECREMENT |
#define USE_STORE_PRE_INCREMENT | ( | MODE | ) | HAVE_PRE_INCREMENT |
#define VAR_LOC_UNKNOWN_P | ( | X | ) | (GET_CODE (X) == CLOBBER && XEXP ((X), 0) == const0_rtx) |
Determine whether X is such an unknown location.
Referenced by print_insn().
#define VIRTUAL_CFA_REGNUM ((FIRST_VIRTUAL_REGISTER) + 4) |
Referenced by emit_barrier().
#define virtual_cfa_rtx (global_rtl[GR_VIRTUAL_CFA]) |
This points to the Canonical Frame Address of the function. This should correspond to the CFA produced by INCOMING_FRAME_SP_OFFSET, but is calculated relative to the arg pointer for simplicity; neither the frame pointer nor the stack pointer is necessarily fixed relative to the CFA until after reload.
Referenced by emit_barrier().
#define VIRTUAL_INCOMING_ARGS_REGNUM (FIRST_VIRTUAL_REGISTER) |
Referenced by emit_barrier().
#define virtual_incoming_args_rtx (global_rtl[GR_VIRTUAL_INCOMING_ARGS]) |
This points to the first word of the incoming arguments passed on the stack, either by the caller or by the callee when pretending it was passed by the caller.
Referenced by emit_barrier(), and get_hard_reg_initial_val().
#define VIRTUAL_OUTGOING_ARGS_REGNUM ((FIRST_VIRTUAL_REGISTER) + 3) |
Referenced by emit_barrier().
#define virtual_outgoing_args_rtx (global_rtl[GR_VIRTUAL_OUTGOING_ARGS]) |
This points to the location in the stack at which outgoing arguments should be written when the stack is pre-pushed (arguments pushed using push insns always use sp).
Referenced by emit_barrier(), emit_library_call_value_1(), and emit_move_insn_1().
#define VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM ((FIRST_VIRTUAL_REGISTER) + 5) |
Referenced by emit_barrier().
#define virtual_preferred_stack_boundary_rtx (global_rtl[GR_VIRTUAL_PREFERRED_STACK_BOUNDARY]) |
This is replaced by crtl->preferred_stack_boundary / BITS_PER_UNIT when finalized.
Referenced by emit_barrier().
#define VIRTUAL_STACK_DYNAMIC_REGNUM ((FIRST_VIRTUAL_REGISTER) + 2) |
Referenced by emit_barrier().
#define virtual_stack_dynamic_rtx (global_rtl[GR_VIRTUAL_STACK_DYNAMIC]) |
This points to the location of dynamically-allocated memory on the stack immediately after the stack pointer has been adjusted by the amount desired.
Referenced by emit_barrier().
#define VIRTUAL_STACK_VARS_REGNUM ((FIRST_VIRTUAL_REGISTER) + 1) |
Referenced by emit_barrier().
#define virtual_stack_vars_rtx (global_rtl[GR_VIRTUAL_STACK_ARGS]) |
If FRAME_GROWS_DOWNWARD, this points to immediately above the first variable on the stack. Otherwise, it points to the first variable on the stack.
Referenced by emit_barrier(), and expand_stack_vars().
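A sketch of how these virtual registers are normally used during RTL expansion, before the virtual-register instantiation pass in function.c rewrites them into a hard register plus offset; the address is built with the generic generators gen_rtx_PLUS and GEN_INT.

    /* Sketch: form the address of a frame local at byte OFFSET.  */
    static rtx
    local_var_address (HOST_WIDE_INT offset)
    {
      return gen_rtx_PLUS (Pmode, virtual_stack_vars_rtx,
                           GEN_INT (offset));
    }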
#define X0ADVFLAGS | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_addr_diff_vec_flags) |
#define X0ANY | ( | RTX, | |
N | |||
) | RTL_CHECK1 (RTX, N, '0') |
Access a '0' field with any type.
#define X0BBDEF | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_bb) |
#define X0CONSTANT | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_constant) |
#define X0CSELIB | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_cselib) |
#define X0EXP | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_rtx) |
#define X0INT | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_int) |
These are like XINT, etc. except that they expect a '0' field instead of the normal type code.
#define X0MEMATTR | ( | RTX, | |
N | |||
) | (RTL_CHECKC1 (RTX, N, MEM).rt_mem) |
#define X0MODE | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_type) |
#define X0REGATTR | ( | RTX, | |
N | |||
) | (RTL_CHECKC1 (RTX, N, REG).rt_reg) |
#define X0STR | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_str) |
#define X0TREE | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_tree) |
#define X0UINT | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_uint) |
#define X0VEC | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, '0').rt_rtvec) |
#define XBBDEF | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, 'B').rt_bb) |
#define XC2EXP | ( | RTX, | |
N, | |||
C1, | |||
C2 | |||
) | (RTL_CHECKC2 (RTX, N, C1, C2).rt_rtx) |
#define XCBBDEF | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_bb) |
#define XCCFI | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_cfi) |
#define XCCSELIB | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_cselib) |
#define XCEXP | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_rtx) |
#define XCFI | ( | RTX, | |
N | |||
) | (RTL_CHECK1 (RTX, N, 'C').rt_cfi) |
#define XCINT | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_int) |
#define XCMODE | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_type) |
#define XCNMPFV | ( | RTX, | |
C, | |||
M | |||
) | (&(RTX)->u.fv) |
#define XCNMPRV | ( | RTX, | |
C, | |||
M | |||
) | (&(RTX)->u.rv) |
#define XCSTR | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_str) |
#define XCTREE | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_tree) |
#define XCUINT | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_uint) |
#define XCVEC | ( | RTX, | |
N, | |||
C | |||
) | (RTL_CHECKC1 (RTX, N, C).rt_rtvec) |
#define XCVECLEN | ( | RTX, | |
N, | |||
C | |||
) | GET_NUM_ELEM (XCVEC (RTX, N, C)) |
#define XEXP | ( | RTX, | |
N | |||
) | (RTL_CHECK2 (RTX, N, 'e', 'u').rt_rtx) |
Referenced by add_equal_note(), add_int_reg_note(), add_mode_tests(), add_name_attribute(), address_of_int_loc_descriptor(), adjust_decomposed_uses(), adjust_offset_for_component_ref(), alignment_for_piecewise_move(), allocate_struct_function(), alter_predicate_for_insn(), alter_reg(), array_ref_element_size(), assemble_asm(), assign_parm_setup_reg(), assign_stack_slot_num_and_sort_pseudos(), attr_alt_subset_of_compl_p(), attr_copy_rtx(), attr_hash_add_string(), attr_rtx_1(), cached_num_sign_bit_copies(), can_reload_into(), canon_condition(), canon_rtx(), check_argument_store(), check_defs(), combine_stack_adjustments(), combine_temp_slots(), compress_float_constant(), compute_argument_addresses(), compute_const_anchors(), compute_local_properties(), compute_store_table(), concat_INSN_LIST(), cond_exec_find_if_block(), contains_replace_regs(), convert_memory_address_addr_space(), copy_boolean(), copy_rtx_if_shared_1(), count_alternatives(), count_reg_usage(), covers_regno_no_parallel_p(), covers_regno_p(), cselib_invalidate_regno(), cselib_record_sets(), dataflow_set_destroy(), dead_or_set_regno_p(), decl_by_reference_p(), decl_for_component_ref(), decode_asm_operands(), determine_common_wider_type(), df_find_hard_reg_defs_1(), df_read_modify_subreg_p(), df_simulate_uses(), df_word_lr_add_problem(), df_word_lr_bottom_dump(), df_word_lr_top_dump(), do_output_reload(), drop_overlapping_mem_locs(), dump_prediction(), dv_changed_p(), dwarf2out_flush_queued_reg_saves(), eliminate_constant_term(), emit_barrier_after_bb(), emit_call_insn_before(), emit_call_insn_before_setloc(), emit_group_load_1(), emit_notes_for_differences_2(), equiv_init_varies_p(), expansion_failed(), find_call_stack_args(), find_comparison_args(), find_dead_or_set_registers(), find_implicit_sets(), find_list_elem(), find_loads(), find_moveable_store(), find_reg_equal_equiv_note(), find_reg_note(), find_reloads_toplev(), find_single_use(), find_tune_attr(), for_each_eh_label(), force_reg(), free_csa_reflist(), free_hash_table(), free_INSN_LIST_list(), free_INSN_LIST_node(), gcse_emit_move_after(), gen_addr_rtx(), gen_attr(), gen_formal_parameter_die(), gen_insn(), gen_int_to_fp_nondecimal_conv_libfunc(), gen_lowpart_for_combine(), gen_lowpart_if_possible(), get_attr_value(), get_call_rtx_from(), get_eh_region_from_rtx(), get_reg_known_equiv_p(), get_reg_known_value(), hard_reg_set_here_p(), hash_rtx(), in_list_p(), init_block_clear_fn(), init_label_info(), init_num_sign_bit_copies_in_rep(), initialize_argument_information(), insert_store(), insert_temp_slot_address(), invert_jump(), iv_analysis_done(), kill_set_value(), lookup_type_die_strip_naming_typedef(), lra_emit_add(), make_extraction(), make_memloc(), make_note_raw(), may_trap_p(), maybe_memory_address_addr_space_p(), maybe_propagate_label_ref(), mem_overlaps_already_clobbered_arg_p(), memref_referenced_p(), merge_dir(), merge_outer_ops(), move2add_use_add3_insn(), move_block_to_reg(), next_active_insn(), noce_emit_store_flag(), nonimmediate_operand(), note_reg_elim_costly(), note_stores(), notice_stack_pointer_modification_1(), num_changes_pending(), num_validated_changes(), offset_within_block_p(), old_insns_match_p(), one_code_hoisting_pass(), operands_match_p(), oprs_unchanged_p(), optimize_attrs(), prev_nonnote_insn(), prev_nonnote_insn_bb(), previous_insn(), print_value(), process_alt_operands(), read_mapping(), recog_for_combine(), record_last_set_info(), record_store(), record_truncated_values(), redirect_jump(), redirect_jump_1(), 
referenced_in_one_insn_in_loop_p(), reg_loc_descriptor(), reg_mentioned_p(), reg_overlap_mentioned_p(), reg_saved_in(), reload_combine_closest_single_use(), reload_combine_note_store(), remove_child_TAG(), remove_child_with_prev(), remove_free_INSN_LIST_elem(), remove_reg_equal_offset_note(), remove_unreachable_eh_regions_worker(), replace_label(), reset_active_stores(), reset_debug_uses_in_loop(), reset_insn_used_flags(), reverse_op(), rtx_addr_varies_p(), rtx_equal_for_memref_p(), rtx_equal_p_cb(), rtx_for_function_call(), rtx_unstable_p(), rtx_varies_p(), save_call_clobbered_regs(), scan_paradoxical_subregs(), scompare_loc_descriptor(), set_label_offsets(), set_mem_alias_set(), set_mem_align(), set_nonzero_bits_and_sign_copies(), set_reg_attrs_from_value(), set_storage_via_setmem(), setup_elimination_map(), simplify_byte_swapping_operation(), simplify_relational_operation_1(), simplify_replace_rtx(), simplify_set(), simplify_shift_const(), simplify_truncation(), simplify_unary_operation_1(), simplify_while_replacing(), spill_hard_reg(), store_killed_in_pat(), subreg_lowpart_offset(), subst_pattern_match(), subst_reloads(), try_back_substitute_reg(), try_widen_shift_mode(), unique_base_value(), update_auto_inc_notes(), validate_change(), validize_mem(), var_reg_decl_set(), var_regno_delete(), volatile_refs_p(), vt_get_canonicalize_base(), vt_stack_adjustments(), walk_attr_value(), and write_header().
#define XINT(RTX, N) (RTL_CHECK2 (RTX, N, 'i', 'n').rt_int)
Referenced by adjust_operands_numbers(), attr_copy_rtx(), canon_rtx(), find_first_unused_number_of_operand(), fixup_new_cold_bb(), gen_rtx_scratch(), get_last_bb_insn(), make_pass_into_cfg_layout_mode(), mark_operands_from_match_dup(), output_added_clobbers_hard_reg_p(), output_get_insn_name(), prev_nondebug_insn(), simplify_or_tree(), and subst_pattern_match().
#define XMODE(RTX, N) (RTL_CHECK1 (RTX, N, 'M').rt_type)
#define XSTR(RTX, N) (RTL_CHECK2 (RTX, N, 's', 'S').rt_str)
Referenced by add_define_attr(), add_name_attribute(), alter_constraints(), alter_test_for_insn(), asm_noperands(), assemble_asm(), attr_copy_rtx(), canon_rtx(), change_subst_attribute(), check_attr_test(), check_defs(), compute_alternative_mask(), count_alternatives(), create_node(), find_first_unused_number_of_operand(), find_int(), find_tune_attr(), gen_attr(), gen_automata_option(), gen_bypass(), gen_insn(), gen_mnemonic_setattr(), gen_satfractuns_conv_libfunc(), get_reg_known_value(), hard_reg_set_here_p(), invert_jump(), n_occurrences(), n_sep_els(), output_added_clobbers_hard_reg_p(), print_value(), read_mapping(), rtx_equal_p_cb(), splay_tree_compare_strings(), subst_pattern_match(), valid_predicate_name_p(), and write_const_num_delay_slots().
#define XTMPL(RTX, N) (RTL_CHECK1 (RTX, N, 'T').rt_str)
Referenced by process_define_cond_exec().
#define XTREE(RTX, N) (RTL_CHECK1 (RTX, N, 't').rt_tree)
#define XUINT(RTX, N) (RTL_CHECK2 (RTX, N, 'i', 'n').rt_uint)
#define XVEC(RTX, N) (RTL_CHECK2 (RTX, N, 'E', 'V').rt_rtvec)
Referenced by alter_predicate_for_insn(), alter_reg(), asm_noperands(), assign_stack_slot_num_and_sort_pseudos(), attr_copy_rtx(), canon_rtx(), check_defs(), check_tune_attr(), clobber_return_register(), collect_one_action_chain(), decompose_register(), df_read_modify_subreg_p(), diagnostic_for_asm(), dwarf2out_frame_debug_adjust_cfa(), dwarf2out_frame_debug_cfa_window_save(), emit_move_insn(), find_tune_attr(), finish_reg_equiv(), fprint_whex(), gcse_emit_move_after(), gen_insn(), gen_label_rtx(), gen_mnemonic_setattr(), gen_split(), get_attr_value(), get_eh_region_and_lp_from_rtx(), hash_rtx(), initialize_argument_information(), insert_insn_end_basic_block(), insn_invalid_p(), max_fn(), memory_operand(), move_block_to_reg(), multiple_sets(), oprs_unchanged_p(), optimize_attrs(), ordered_comparison_operator(), output_added_clobbers_hard_reg_p(), previous_insn(), process_bb_node_lives(), record_hard_reg_sets(), reg_mentioned_p(), reg_overlap_mentioned_p(), remove_reg_equal_equiv_notes_for_regno(), replace_label(), rtx_addr_varies_p(), save_call_clobbered_regs(), set_nonzero_bits_and_sign_copies(), set_reg_attrs_from_value(), simplejump_p(), single_set_2(), store_killed_in_pat(), strict_memory_address_addr_space_p(), subst_pattern_match(), tablejump_p(), try_combine(), update_cfg_for_uncondjump(), walk_attr_value(), and write_header().
#define XVECLEN(RTX, N) GET_NUM_ELEM (XVEC (RTX, N))
Referenced by adjust_mem_uses(), alter_predicate_for_insn(), alter_reg(), asm_noperands(), assign_stack_slot_num_and_sort_pseudos(), attr_copy_rtx(), canon_rtx(), check_defs(), clobber_return_register(), decompose_register(), df_read_modify_subreg_p(), dwarf2out_frame_debug_cfa_window_save(), emit_move_insn(), finish_reg_equiv(), gcse_emit_move_after(), gen_insn(), gen_label_rtx(), gen_mnemonic_setattr(), gen_split(), get_eh_region_and_lp_from_rtx(), hash_rtx(), insert_insn_end_basic_block(), max_fn(), multiple_sets(), oprs_unchanged_p(), optimize_attrs(), output_added_clobbers_hard_reg_p(), process_bb_node_lives(), record_hard_reg_sets(), reg_mentioned_p(), remove_reg_equal_equiv_notes_for_regno(), replace_label(), set_nonzero_bits_and_sign_copies(), set_reg_attrs_from_value(), single_set_2(), store_killed_in_pat(), subst_pattern_match(), update_cfg_for_uncondjump(), walk_attr_value(), and write_header().
Referenced by attr_copy_rtx().
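The accessor macros above (XEXP, XINT, XSTR, XVEC, XVECEXP, XVECLEN) are normally driven by the format string returned by GET_RTX_FORMAT. The following sketch is not part of rtl.h; it is a minimal illustration, using only macros defined in this file, of how a pass might count the REG operands reachable from an arbitrary rtx. The helper name is invented for the example.

  /* Illustrative only: count REG sub-expressions of X by walking its
     operands according to the rtx format string.  'e' operands are
     sub-rtx's (XEXP); 'E' operands are rtx vectors (XVEC/XVECEXP).  */
  static int
  count_regs (const_rtx x)
  {
    const char *fmt;
    int i, j, count = 0;

    if (x == NULL_RTX)
      return 0;
    if (REG_P (x))
      return 1;

    fmt = GET_RTX_FORMAT (GET_CODE (x));
    for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          count += count_regs (XEXP (x, i));
        else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_regs (XVECEXP (x, i, j));
      }
    return count;
  }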
Callback for for_each_inc_dec, to process the autoinc operation OP within MEM that sets DEST to SRC + SRCOFF, or SRC if SRCOFF is NULL. The callback is passed the same opaque ARG passed to for_each_inc_dec. Return zero to continue looking for other autoinc operations, -1 to skip OP's operands, and any other value to interrupt the traversal and return that value to the caller of for_each_inc_dec.
Structure used to describe the attributes of a MEM. These are hashed so that MEMs that have the same attributes share a data structure. This means they cannot be modified in place.
Structure used to describe the attributes of a REG, in the same way that mem_attrs does for a MEM above. Note that the OFFSET field is calculated in the same way as for mem_attrs, rather than in the same way as a SUBREG_BYTE. For example, if a big-endian target stores a byte object in the low part of a 4-byte register, the OFFSET field will be -3 rather than 0.
typedef struct replace_label_data replace_label_data |
Structure used for passing data to REPLACE_LABEL.
typedef union rtunion_def rtunion |
typedef int(* rtx_function)(rtx *, void *) |
enum global_rtl_index |
Index labels for global_rtl.
enum insn_note |
Codes that appear in the NOTE_KIND field for kinds of notes that are not line numbers. These codes are all negative.
Notice that we do not try to use zero here for any of the special note codes because sometimes the source line actually can be zero! This happens (for example) when we are generating code for the per-translation-unit constructor and destructor routines for some C++ translation unit.
enum label_kind |
enum libcall_type |
In calls.c
enum reg_note |
enum rtx_class |
Similar, but since generator files get more entries... Register Transfer Language EXPRESSIONS CODE CLASSES
enum rtx_code |
DEF_RTL_EXPR |
This file contains the definitions and documentation for the Register Transfer Expressions (rtx's) that make up the Register Transfer Language (rtl) used in the Back End of the GNU compiler. Copyright (C) 1987-2013 Free Software Foundation, Inc. This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see http://www.gnu.org/licenses/. Expression definitions and descriptions for all targets are in this file. Some will not be used for some targets. The fields in the cpp macro call "DEF_RTL_EXPR()" are used to create declarations in the C source of the compiler. The fields are:
All of the expressions that appear only in machine descriptions, not in RTL used by the compiler itself, are at the end of the file. Unknown, or no such operation; the enumeration constant should have value zero. |
void _fatal_insn (const char *, const_rtx, const char *, int, const char *)
The above incremented error_count, but isn't an error that we want to count, so reset it here.
void _fatal_insn_not_found (const_rtx, const char *, int, const char *)
int active_insn_p | ( | const_rtx | ) |
void add_insn | ( | rtx | ) |
void add_insn_after (rtx, rtx, basic_block)
void add_insn_before (rtx, rtx, basic_block)
void add_int_reg_note (rtx, enum reg_note, int)
Referenced by next_real_insn().
int address_cost (rtx, enum machine_mode, addr_space_t, bool)
Referenced by find_defs(), and reload_combine_closest_single_use().
Referenced by use_crosses_set_p().
int any_condjump_p | ( | const_rtx | ) |
int any_uncondjump_p | ( | const_rtx | ) |
int asm_noperands | ( | const_rtx | ) |
int asm_str_count | ( | const char * | templ | ) |
rtx assign_stack_local (enum machine_mode, HOST_WIDE_INT, int)
In function.c
rtx assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align, int kind)
Allocate a stack slot of SIZE bytes and return a MEM rtx for it with machine mode MODE.
ALIGN controls the amount of alignment for the address of the slot: 0 means according to MODE, -1 means use BIGGEST_ALIGNMENT and round size to multiple of that, -2 means use BITS_PER_UNIT, positive specifies alignment boundary in bits.
KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce alignment and ASLK_RECORD_PAD bit set if we should remember extra space we allocated for alignment purposes. When we are called from assign_stack_temp_for_type, it is not set so we don't track the same stack slot in two independent lists.
We do not round to stack_boundary here.
Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.
If stack is realigned and stack alignment value hasn't been finalized, it is OK not to increase stack_alignment_estimated. The bigger alignment requirement is recorded in stack_alignment_needed below.
It is OK to reduce the alignment as long as the requested size is 0 or the estimated stack alignment >= mode alignment.
On a big-endian machine, if we are allocating more space than we will use, use the least significant bytes of those that are allocated.
If we have already instantiated virtual registers, return the actual address relative to the frame pointer.
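As a concrete, hypothetical illustration of the ALIGN convention described above, a pass running during RTL expansion could request a word-sized SImode slot with the mode's natural alignment as follows; the helper name is invented for the example.

  /* Hypothetical helper: allocate an SImode stack slot with the mode's
     natural alignment (ALIGN == 0); passing -1 instead would request
     BIGGEST_ALIGNMENT.  */
  static rtx
  get_simode_slot (void)
  {
    return assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  }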
rtx assign_stack_temp (enum machine_mode, HOST_WIDE_INT)
rtx assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, tree type)
Allocate a temporary stack slot and record it for possible later reuse.
MODE is the machine mode to be given to the returned rtx.
SIZE is the size in units of the space required. We do no rounding here since assign_stack_local will do any required rounding.
TYPE is the type that will be used for the stack slot.
If SIZE is -1 it means that somebody tried to allocate a temporary of a variable size.
Try to find an available, already-allocated temporary of the proper mode which meets the size and alignment requirements. Choose the smallest one with the closest alignment. If assign_stack_temp is called outside of the tree->rtl expansion, we cannot reuse the stack slots (that may still refer to VIRTUAL_STACK_VARS_REGNUM).
Make our best, if any, the one to use.
If there are enough aligned bytes left over, make them into a new temp_slot so that the extra bytes don't get wasted. Do this only for BLKmode slots, so that we can be sure of the alignment.
If we still didn't find one, make a new temporary.
We are passing an explicit alignment request to assign_stack_local. One side effect of that is assign_stack_local will not round SIZE to ensure the frame offset remains suitably aligned. So for requests which depended on the rounding of SIZE, we go ahead and round it now. We also make sure ALIGNMENT is at least BIGGEST_ALIGNMENT.
The following slot size computation is necessary because we don't know the actual size of the temporary slot until assign_stack_local has performed all the frame alignment and size rounding for the requested temporary. Note that extra space added for alignment can be either above or below this stack slot depending on which way the frame grows. We include the extra space if and only if it is above this slot.
Now define the fields used by combine_temp_slots.
Create a new MEM rtx to avoid clobbering MEM flags of old slots.
If we know the alias set for the memory that will be used, use it. If there's no TYPE, then we don't know anything about the alias set for the memory.
If a type is specified, set the relevant flags.
Assign a temporary. If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl and so that should be used in error messages. In either case, we allocate of the given type. MEMORY_REQUIRED is 1 if the result must be addressable stack memory; it is 0 if a register is OK. DONT_PROMOTE is 1 if we should not promote values in register to wider modes.
Zero sized arrays are GNU C extension. Set size to 1 to avoid problems with allocating the stack space.
Unfortunately, we don't yet know how to allocate variable-sized temporaries. However, sometimes we can find a fixed upper limit on the size, so try that instead.
The size of the temporary may be too large to fit into an integer.
??? Not sure this should happen except for user silliness, so limit this to things that aren't compiler-generated temporaries. The rest of the time we'll die in assign_stack_temp_for_type.
References avail_temp_slots, temp_slot::base_offset, cut_slot_from_list(), temp_slot::full_size, GET_MODE, temp_slot::next, temp_slot::size, and temp_slot::slot.
Referenced by expand_value_return(), and initialize_argument_information().
int auto_inc_p | ( | const_rtx | ) |
Referenced by for_each_rtx(), and simplify_relational_operation_1().
int byte_lowpart_offset (enum machine_mode outer_mode, enum machine_mode inner_mode)
Return the number of bytes between the start of an OUTER_MODE in-memory value and the start of an INNER_MODE in-memory value, given that the former is a lowpart of the latter. It may be a paradoxical lowpart, in which case the offset will be negative on big-endian targets.
Referenced by set_reg_attrs_from_value().
int canon_anti_dependence (const_rtx mem, bool mem_canonicalized, const_rtx x, enum machine_mode x_mode, rtx x_addr)
Likewise, but we already have a canonicalized MEM, and X_ADDR for X. Also, consider X in X_MODE (which might be from an enclosing STRICT_LOW_PART / ZERO_EXTRACT). If MEM_CANONICALIZED is true, MEM is canonicalized.
Referenced by cselib_invalidate_regno().
int canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr, const_rtx x, rtx x_addr)
Canonical true dependence: X is read after store in MEM takes place. Variant of true_dependence which assumes MEM has already been canonicalized (hence we no longer do that here). The mem_addr argument has been added, since true_dependence_1 computed this value prior to canonicalizing.
rtx canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest, rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
Given an insn and condition, return a canonical description of the test being made.
Given an insn INSN and condition COND, return the condition in a canonical form to simplify testing by callers. Specifically:
(1) The code will always be a comparison operation (EQ, NE, GT, etc.). (2) Both operands will be machine operands; (cc0) will have been replaced. (3) If an operand is a constant, it will be the second operand. (4) (LE x const) will be replaced with (LT x <const+1>) and similarly for GE, GEU, and LEU.
If the condition cannot be understood, or is an inequality floating-point comparison which needs to be reversed, 0 will be returned.
If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
If EARLIEST is nonzero, it is a pointer to a place where the earliest insn used in locating the condition was found. If a replacement test of the condition is desired, it should be placed in front of that insn and we will be sure that the inputs are still valid.
If WANT_REG is nonzero, we wish the condition to be relative to that register, if possible. Therefore, do not canonicalize the condition further. If ALLOW_CC_MODE is nonzero, allow the condition returned to be a compare to a CC mode register.
If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST and at INSN.
If we are comparing a register with zero, see if the register is set in the previous insn to a COMPARE or a comparison operation. Perform the same tests as a function of STORE_FLAG_VALUE as find_comparison_args in cse.c
Set nonzero when we find something of interest.
If this is a COMPARE, pick up the two things being compared.
Go back to the previous insn. Stop if it is not an INSN. We also stop if it isn't a single set or if it has a REG_INC note because we don't want to bother dealing with it.
In cfglayout mode, there do not have to be labels at the beginning of a block, or jumps at the end, so the previous conditions would not stop us when we reach bb boundary.
If this is setting OP0, get what it sets it to if it looks relevant.
??? We may not combine comparisons done in a CCmode with comparisons not done in a CCmode. This is to aid targets like Alpha that have an IEEE compliant EQ instruction, and a non-IEEE compliant BEQ instruction. The use of CCmode is actually artificial, simply to prevent the combination, but should not affect other platforms. However, we must allow VOIDmode comparisons to match either CCmode or non-CCmode comparison, because some ports have modeless comparisons inside branch patterns. ??? This mode check should perhaps look more like the mode check in simplify_comparison in combine.
If this sets OP0, but not directly, we have to give up.
If the caller is expecting the condition to be valid at INSN, make sure X doesn't change before INSN.
If constant is first, put it last.
If OP0 is the result of a comparison, we weren't able to find what was really being compared, so fail.
Canonicalize any ordered comparison with integers involving equality if we can do computations in the relevant mode and we do not overflow.
When cross-compiling, const_val might be sign-extended from BITS_PER_WORD to HOST_BITS_PER_WIDE_INT
Never return CC0; return zero instead.
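A hedged sketch of a typical caller, assuming a conditional jump whose pattern is (set (pc) (if_then_else COND ...)); pc_set and any_condjump_p are the jump.c helpers declared elsewhere in this file, and the wrapper name is invented for the example.

  /* Return the canonicalized condition tested by conditional jump INSN,
     or NULL_RTX if it cannot be understood.  EARLIEST is discarded.  */
  static rtx
  canonical_jump_condition (rtx insn)
  {
    rtx cond, earliest;

    if (!any_condjump_p (insn))
      return NULL_RTX;
    cond = XEXP (SET_SRC (pc_set (insn)), 0);
    return canonicalize_condition (insn, cond, 0, &earliest,
                                   NULL_RTX, 0, 0);
  }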
enum machine_mode choose_hard_reg_mode (unsigned int regno, unsigned int nregs, bool call_saved)
In reginfo.c
Return a machine mode that is legitimate for hard reg REGNO and large enough to save nregs. If we can't find one, return VOIDmode. If CALL_SAVED is true, only consider modes that are call saved.
We first look for the largest integer mode that can be validly held in REGNO. If none, we look for the largest floating-point mode. If we still didn't find a valid mode, try CCmode.
Iterate over all of the CCmodes.
We can't find a mode valid for this register.
Referenced by expand_builtin_dwarf_sp_column().
int commutative_operand_precedence | ( | rtx | ) |
Prepare a sequence comparing OP0 with OP1 using COMP and jumping to LABEL if true, with probability PROB. If CINSN is not NULL, it is the insn to copy in order to create a jump.
A hack: there seems to be no easy generic way to make a conditional jump from a ccmode comparison.
int comparison_dominates_p | ( | enum | rtx_code, |
enum | rtx_code | ||
) |
In jump.c
unsigned int compute_alignments | ( | void | ) |
In final.c
Compute branch alignments based on frequency information in the CFG.
If not optimizing or optimizing for size, don't assign any alignments.
There are two purposes for aligning a block with no fallthru incoming edge: 1) to avoid fetch stalls when the branch destination is near a cache boundary; 2) to improve cache efficiency in case the previous block is not executed (so it does not need to be in the cache). To catch the first case, we align frequently executed blocks. To catch the second, we align blocks that are executed more frequently than their predecessor and whose predecessor is likely not to be executed when the function is called.
In case block is frequent and reached mostly by non-fallthru edge, align it. It is most likely a first block of loop.
References EDGE_FREQUENCY, and edge_def::flags.
int computed_jump_p | ( | const_rtx | ) |
Referenced by default_fixed_point_supported_p(), and reorder_basic_blocks().
int condjump_in_parallel_p | ( | const_rtx | ) |
int condjump_p | ( | const_rtx | ) |
rtx convert_memory_address_addr_space (enum machine_mode to_mode, rtx x, addr_space_t as)
Given X, a memory address in address space AS' pointer mode, convert it to an address in the address space's address mode, or vice versa (TO_MODE says which way). We take advantage of the fact that pointers are not allowed to overflow by commuting arithmetic operations over conversions so that address arithmetic insns can be used.
References CASE_CONST_SCALAR_INT, CONST_INT_P, convert_memory_address_addr_space(), GET_CODE, GET_MODE, GET_MODE_SIZE, LABEL_REF_NONLOCAL_P, PUT_MODE, REG_POINTER, shallow_copy_rtx, simplify_unary_operation(), SUBREG_PROMOTED_VAR_P, SUBREG_REG, and XEXP.
Referenced by addr_expr_of_non_mem_decl_p_1(), and convert_memory_address_addr_space().
In emit-rtl.c
Increase both members of the full_rtx_costs structure C by the cost of N insns.
Referenced by move2add_use_add3_insn().
Compare two full_rtx_costs structures A and B, returning true if A < B when optimizing for speed.
Referenced by move2add_use_add3_insn().
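The two inline helpers documented above lost their prototypes in this rendering. Assuming full_rtx_costs helpers named get_full_set_rtx_cost and costs_lt_p that match the descriptions here and at the get_full_rtx_cost entry below (these names are assumptions of this example), a cost comparison might look like the following sketch.

  /* Hedged sketch: decide whether SET_A is cheaper than SET_B, comparing
     both speed and size costs the way move2add-style code does.  The
     helper names are assumed, not taken verbatim from this page.  */
  static bool
  set_a_is_cheaper_p (rtx set_a, rtx set_b, bool speed)
  {
    struct full_rtx_costs cost_a, cost_b;

    get_full_set_rtx_cost (set_a, &cost_a);
    get_full_set_rtx_cost (set_b, &cost_b);
    return costs_lt_p (&cost_a, &cost_b, speed);
  }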
location_t curr_insn_location | ( | void | ) |
Get current location.
Referenced by mark_label_nuses().
Referenced by try_combine(), and use_crosses_set_p().
int dead_or_set_regno_p | ( | const_rtx | , |
unsigned | int | ||
) |
Referenced by note_uses(), and use_crosses_set_p().
void debug | ( | const rtx_def & | ref | ) |
void debug | ( | const rtx_def * | ptr | ) |
void debug_rtx | ( | const_rtx | ) |
void debug_rtx_list | ( | const_rtx | , |
int | |||
) |
void decide_function_section | ( | tree | ) |
const char* decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs, const char **constraints, enum machine_mode *modes, location_t *loc)
Assuming BODY is an insn body that uses ASM_OPERANDS, copy its operands (both input and output) into the vector OPERANDS, the locations of the operands within the insn into the vector OPERAND_LOCS, and the constraints for the operands into CONSTRAINTS. Write the modes of the operands into MODES. Return the assembler-template.
If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0, we don't store that info.
Zero output asm: BODY is (asm_operands ...).
Single output asm: BODY is (set OUTPUT (asm_operands ...)).
The output is in the SET. Its constraint is in the ASM_OPERANDS itself.
At least one output, plus some CLOBBERs. The outputs are in the SETs. Their constraints are in the ASM_OPERANDS itself.
References address_operand(), asm_operand_ok(), CONST_DOUBLE_AS_FLOAT_P, GET_CODE, GET_MODE, GET_MODE_CLASS, MEM_P, memory_operand(), NULL, offsettable_nonstrict_memref_p(), TARGET_MEM_CONSTRAINT, and XEXP.
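A hedged usage sketch: extract the template, operands, constraints and modes from an insn that wraps ASM_OPERANDS. XALLOCAVEC and PATTERN are assumed to be available as usual in the RTL passes, and the wrapper name is invented for the example.

  /* Illustrative only: decode an inline-asm insn's operands and return
     the assembler template, or NULL if INSN is not such an asm.  */
  static const char *
  get_asm_template (rtx insn)
  {
    rtx body = PATTERN (insn);
    int noperands = asm_noperands (body);
    rtx *operands;
    const char **constraints;
    enum machine_mode *modes;
    location_t loc;

    if (noperands < 0)
      return NULL;
    operands = XALLOCAVEC (rtx, noperands);
    constraints = XALLOCAVEC (const char *, noperands);
    modes = XALLOCAVEC (enum machine_mode, noperands);
    return decode_asm_operands (body, operands, NULL, constraints,
                                modes, &loc);
  }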
void decompose_address (struct address_info *info, rtx *loc, enum machine_mode mode, addr_space_t as, enum rtx_code outer_code)
Describe address *LOC in *INFO. MODE is the mode of the addressed value, or VOIDmode if not known. AS is the address space associated with LOC. OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise.
Referenced by lsb_bitfield_op_p().
void decompose_lea_address | ( | struct address_info * | , |
rtx * | |||
) |
Referenced by valid_address_p().
void decompose_mem_address | ( | struct address_info * | , |
rtx | |||
) |
Referenced by valid_address_p().
void delete_dead_jumptables | ( | void | ) |
In cfgcleanup.c
Delete any jump tables never referenced. We can't delete them at the time of removing the tablejump insn as they are referenced by the preceding insns computing the destination, so we delay deleting them and garbage-collect them once life information is computed.
A dead jump table does not belong to any basic block. Scan insns between two adjacent basic blocks.
void delete_insn | ( | rtx | ) |
void delete_insn_and_edges | ( | rtx | ) |
void delete_insns_since | ( | rtx | ) |
void dump_combine_stats | ( | FILE * | ) |
void dump_combine_total_stats | ( | FILE * | ) |
void dump_insn_slim | ( | FILE * | , |
const_rtx | |||
) |
Referenced by lra_inheritance().
Same as above, but stop at LAST or when COUNT == 0. If COUNT < 0 it will stop only at LAST or NULL rtx.
References FOR_BB_INSNS, pp_bar, pp_write_text_as_dot_label_to_stream(), pp_write_text_to_stream(), and print_insn_with_notes().
Referenced by lra_get_insn_regs().
void dump_rtx_statistics | ( | void | ) |
References GET_CODE, GET_RTX_FORMAT, GET_RTX_NAME, internal_error(), and trim_filename().
void dump_value_slim | ( | FILE * | , |
const_rtx | , | ||
int | |||
) |
Functions in sched-vis.c. FIXME: Ideally these functions would not be in sched-vis.c but in rtl.c, because they are not only used by the scheduler anymore but for all "slim" RTL dumping.
Referenced by valid_address_p().
int eh_returnjump_p | ( | rtx | ) |
rtx emit_barrier | ( | void | ) |
Make an insn of code BARRIER and add it to the end of the doubly-linked list.
References VIRTUAL_CFA_REGNUM, virtual_cfa_rtx, VIRTUAL_INCOMING_ARGS_REGNUM, virtual_incoming_args_rtx, VIRTUAL_OUTGOING_ARGS_REGNUM, virtual_outgoing_args_rtx, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM, virtual_preferred_stack_boundary_rtx, VIRTUAL_STACK_DYNAMIC_REGNUM, virtual_stack_dynamic_rtx, VIRTUAL_STACK_VARS_REGNUM, and virtual_stack_vars_rtx.
Referenced by emit_debug_insn(), expand_float(), and std_expand_builtin_va_start().
rtx emit_insn_after_noloc | ( | rtx | , |
rtx | , | ||
basic_block | |||
) |
void emit_insn_at_entry | ( | rtx | ) |
In emit-rtl.c
rtx emit_insn_before_noloc | ( | rtx | , |
rtx | , | ||
basic_block | |||
) |
void emit_jump | ( | rtx | ) |
Referenced by do_compare_rtx_and_jump(), and do_jump_by_parts_zero_rtx().
void emit_library_call (rtx orgfun, enum libcall_type fn_type, enum machine_mode outmode, int nargs, ...)
Output a library call to function FUN (a SYMBOL_REF rtx) (emitting the queue unless NO_QUEUE is nonzero), for a value of mode OUTMODE, with NARGS different arguments, passed as alternating rtx values and machine_modes to convert them to.
FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for other types of library calls.
Referenced by maybe_emit_sync_lock_test_and_set().
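A hedged example of the varargs convention: each argument is passed as an rtx followed by the machine mode used to pass it. LIBFUNC, OP0 and OP1 are placeholders, and the wrapper name is invented for the example.

  /* Emit a call to a void-returning two-argument library routine,
     passing both operands in SImode.  */
  static void
  emit_two_arg_libcall (rtx libfunc, rtx op0, rtx op1)
  {
    emit_library_call (libfunc, LCT_NORMAL, VOIDmode, 2,
                       op0, SImode, op1, SImode);
  }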
rtx emit_library_call_value (rtx orgfun, rtx value, enum libcall_type fn_type, enum machine_mode outmode, int nargs, ...)
Like emit_library_call except that an extra argument, VALUE, comes second and says where to store the result. (If VALUE is zero, this function chooses a convenient way to return the value.)
This function returns an rtx for where the value is to be found. If VALUE is nonzero, VALUE is returned.
rtx emit_note | ( | enum | insn_note | ) |
void end_alias_analysis | ( | void | ) |
Referenced by pre_insert_copies().
void end_sequence | ( | void | ) |
After emitting to a sequence, restore previous saved state.
To get the contents of the sequence just made, you must call `get_insns' before calling here.
If the compiler might have deferred popping arguments while generating this sequence, and this sequence will not be immediately inserted into the instruction stream, use do_pending_stack_adjust before calling get_insns. That will ensure that the deferred pops are inserted into this sequence, and not into some random location in the instruction stream. See INHIBIT_DEFER_POP for more information about deferred popping of arguments.
Referenced by emit_insn_before_setloc(), expand_atomic_load(), expand_builtin_sincos(), expand_ffs(), gmalloc(), init_set_costs(), noce_emit_store_flag(), noce_try_abs(), noce_try_addcc(), record_insns(), sjlj_assign_call_site_values(), and split_iv().
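The usual pairing with start_sequence and get_insns looks like the following sketch; it is illustrative only and the wrapper name is invented.

  /* Build a detached insn sequence that moves SRC into DEST, restoring
     the previous emission state afterwards as required above.  */
  static rtx
  move_in_sequence (rtx dest, rtx src)
  {
    rtx seq;

    start_sequence ();
    emit_move_insn (dest, src);
    seq = get_insns ();
    end_sequence ();
    return seq;
  }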
rtx entry_of_function | ( | void | ) |
Return RTX to emit after when we want to emit code on the entry of function.
void expand_naked_return | ( | void | ) |
Generate RTL to return directly from the current function. (That is, we bypass any return value.)
References copy_blkmode_to_reg(), expand_null_return(), expand_value_return(), GET_MODE, and PUT_MODE.
void expand_null_return | ( | void | ) |
In stmt.c
Generate RTL to return from the current function, with no value. (That is, we do not do anything about returning any value.)
If this function was declared to return a value, but we didn't, clobber the return registers so that they are not propagated live to the rest of the function.
Referenced by expand_naked_return().
bool expensive_function_p | ( | int | ) |
HOST_WIDE_INT find_args_size_adjust | ( | rtx | ) |
Referenced by sjlj_assign_call_site_values().
Referenced by alloc_reg_note(), expand_var_during_unrolling(), find_implicit_sets(), insert_store(), and one_code_hoisting_pass().
Referenced by expand_copysign_bit(), and get_last_value_validate().
Referenced by contains_replace_regs(), cselib_record_sets(), dump_prediction(), find_all_hard_reg_sets(), find_partition_fixes(), fixup_eh_region_note(), fixup_new_cold_bb(), for_each_eh_label(), force_move_args_size_note(), get_eh_region_from_rtx(), get_last_bb_insn(), make_pass_into_cfg_layout_mode(), merge_dir(), multiple_sets(), next_real_insn(), noce_can_store_speculate_p(), prev_nondebug_insn(), previous_insn(), process_alt_operands(), process_bb_node_lives(), process_reg_shuffles(), record_promoted_value(), remove_unreachable_eh_regions_worker(), reorder_basic_blocks(), save_call_clobbered_regs(), saved_hard_reg_compare_func(), set_nonzero_bits_and_sign_copies(), too_high_register_pressure_p(), and try_back_substitute_reg().
int find_regno_fusage | ( | const_rtx | , |
enum | rtx_code, | ||
unsigned | int | ||
) |
Referenced by find_regno_note(), and use_crosses_set_p().
Referenced by get_last_value(), ira_reassign_pseudos(), match_reload(), process_alt_operands(), record_promoted_value(), and update_table_tick().
void finish_subregs_of_mode | ( | void | ) |
void fix_register | ( | const char * | , |
int | , | ||
int | |||
) |
int for_each_inc_dec (rtx *x, for_each_inc_dec_fn fn, void *arg)
Traverse *X looking for MEMs, and for autoinc operations within them. For each such autoinc operation found, call FN, passing it the innermost enclosing MEM, the operation itself, the RTX modified by the operation, two RTXs (the second may be NULL) that, once added, represent the value to be held by the modified RTX afterwards, and ARG. FN is to return -1 to skip looking for other autoinc operations within the visited operation, 0 to continue the traversal, or any other value to have it returned to the caller of for_each_inc_dec.
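A hedged sketch of a for_each_inc_dec_fn callback that simply counts the autoinc operations in an insn pattern; the parameter order is assumed from the callback description earlier in this file.

  /* Count autoinc operations; returning 0 keeps the traversal going.  */
  static int
  count_autoinc (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
                 rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
                 rtx srcoff ATTRIBUTE_UNUSED, void *arg)
  {
    int *count = (int *) arg;
    (*count)++;
    return 0;
  }

  /* Usage: int n = 0; for_each_inc_dec (&PATTERN (insn), count_autoinc, &n);  */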
int for_each_rtx (rtx *, rtx_function, void *)
In varasm.c
Referenced by emit_move_ccmode().
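A hedged sketch of an rtx_function callback for for_each_rtx: returning a nonzero value stops the walk and is propagated back to the caller, while 0 continues it.

  /* Stop the walk (return 1) as soon as a MEM is encountered.  */
  static int
  contains_mem_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
  {
    return MEM_P (*px) ? 1 : 0;
  }

  /* Usage: if (for_each_rtx (&PATTERN (insn), contains_mem_1, NULL)) ...  */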
void free_EXPR_LIST_node | ( | rtx | ) |
void free_INSN_LIST_list | ( | rtx * | ) |
void free_INSN_LIST_node | ( | rtx | ) |
void free_reg_info | ( | void | ) |
Free up register info memory.
Free up the space allocated by allocate_reg_info.
rtx gen_hard_reg_clobber | ( | enum | machine_mode, |
unsigned | int | ||
) |
In emit-rtl.c
rtx gen_label_rtx | ( | void | ) |
Return a newly created CODE_LABEL rtx with a unique label number.
References GET_CODE, SET, SET_DEST, SET_SRC, verify_rtx_sharing(), XVECEXP, and XVECLEN.
Referenced by do_jump_by_parts_zero_rtx(), expand_float(), have_sub2_insn(), and set_stack_check_libfunc().
rtx gen_raw_REG | ( | enum | machine_mode, |
int | |||
) |
rtx gen_reg_rtx | ( | enum | machine_mode | ) |
In emit-rtl.c
rtx gen_rtx_CONST_INT | ( | enum | machine_mode, |
HOST_WIDE_INT | |||
) |
There are some RTL codes that require special attention; the generation functions included above do the raw handling. If you add to this list, modify special_rtx in gengenrtl.c as well.
rtx gen_rtx_REG | ( | enum | machine_mode, |
unsigned | |||
) |
Generate a register with the same attributes as REG, but with OFFSET added to the REG_OFFSET.
enum machine_mode get_address_mode | ( | rtx | mem | ) |
Referenced by merge_dir(), and reverse_op().
Given a JUMP_INSN, return a canonical description of the test being made.
int get_first_label_num | ( | void | ) |
Return first label number used in this function (if any were used).
rtx get_first_nonnote_insn | ( | void | ) |
void get_full_rtx_cost (rtx x, enum rtx_code outer, int opno, struct full_rtx_costs *c)
Fill in the structure C with information about both speed and size rtx costs for X, which is operand OPNO in an expression with code OUTER.
Referenced by init_costs_to_zero().
Like set_rtx_cost, but return both the speed and size costs in C.
Referenced by move2add_use_add3_insn(), and move2add_valid_value_p().
Like set_src_cost, but return both the speed and size costs in C.
Referenced by move2add_use_add3_insn().
enum rtx_code get_index_code | ( | const struct address_info * | ) |
HOST_WIDE_INT get_index_scale | ( | const struct address_info * | ) |
const char* get_insn_name | ( | int | ) |
HOST_WIDE_INT get_integer_term | ( | const_rtx | ) |
rtx get_last_insn_anywhere | ( | void | ) |
Emission of insns (adding them to the doubly-linked list). Return the last insn emitted, even if it is in a sequence now pushed.
References DEBUG_INSN_P, NOTE_P, and PREV_INSN.
rtx get_last_nonnote_insn | ( | void | ) |
Return the last nonnote insn emitted in current sequence or current function. This routine looks inside SEQUENCEs.
int get_max_insn_count | ( | void | ) |
Return the number of actual (non-debug) insns emitted in this function.
The table size must be stable across -g, to avoid codegen differences due to debug insns, and not be affected by -fmin-insn-uid, to avoid excessive table size and to simplify debugging of -fcompare-debug failures.
Referenced by canon_list_insert(), and compute_hash_table_work().
Return the attributes of a MEM rtx.
Referenced by set_mem_alias_set(), set_mem_align(), and set_mem_attributes().
void get_mode_bounds (enum machine_mode mode, int sign, enum machine_mode target_mode, rtx *mmin, rtx *mmax)
In stor-layout.c.
Gets minimal and maximal values for MODE (signed or unsigned depending on SIGN). The returned constants are made to be usable in TARGET_MODE.
Referenced by simplify_relational_operation_1().
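A hedged sketch: obtain the signed bounds of a mode as constants usable in that same mode. The wrapper name is invented for the example.

  /* Illustrative wrapper: MMIN/MMAX receive the signed minimum and
     maximum of MODE, made usable in MODE itself.  */
  static void
  get_signed_bounds (enum machine_mode mode, rtx *mmin, rtx *mmax)
  {
    get_mode_bounds (mode, 1, mode, mmin, mmax);
  }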
enum machine_mode get_pool_mode | ( | const_rtx | ) |
Referenced by get_integer_term().
rtx get_reg_base_value | ( | unsigned | int | ) |
bool get_reg_known_equiv_p | ( | unsigned | int | ) |
rtx get_reg_known_value | ( | unsigned | int | ) |
Referenced by insert_with_costs(), and remove_invalid_refs().
void globalize_reg | ( | tree | , |
int | |||
) |
unsigned hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p, int *hash_arg_in_memory_p, bool have_reg_qty)
Hash an rtx. We are careful to make sure the value is never negative. Equivalent registers hash identically. MODE is used in hashing for CONST_INTs only; otherwise the mode of X is used.
Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains a MEM rtx which does not have the MEM_READONLY_P flag set.
Note that cse_insn knows that the hash code of a MEM expression is just (int) MEM plus the hash code of the address.
References exp_equiv_p(), XEXP, XVECEXP, and XVECLEN.
Referenced by invariant_for_use().
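A hedged sketch of a caller, assuming single_set is available as usual: hash the source of a single-set insn while tracking volatility and memory references as described above. The wrapper name is invented for the example.

  /* Illustrative only: hash SET_SRC of INSN, or return 0 if the insn is
     not a single set.  HAVE_REG_QTY is false, so register quantities
     are not consulted.  */
  static unsigned
  hash_single_set_src (rtx insn)
  {
    int do_not_record = 0, hash_arg_in_memory = 0;
    rtx set = single_set (insn);

    if (set == NULL_RTX)
      return 0;
    return hash_rtx (SET_SRC (set), GET_MODE (SET_DEST (set)),
                     &do_not_record, &hash_arg_in_memory, false);
  }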
unsigned hash_rtx_cb (const_rtx x, enum machine_mode mode, int *do_not_record_p, int *hash_arg_in_memory_p, bool have_reg_qty, hash_rtx_callback_function cb)
Same as hash_rtx, but call CB on each rtx if it is not NULL. When the callback returns true, we continue with the new rtx.
Used to turn recursion into iteration. We can't rely on GCC's tail-recursion elimination since we need to keep accumulating values in HASH.
Invoke the callback first.
On some machines, we can't record any non-fixed hard register, because extending its life will cause reload problems. We consider ap, fp, sp, gp to be fixed for this purpose. We also consider CCmode registers to be fixed for this purpose; failure to do so leads to failure to simplify 0<100 type of conditionals. On all machines, we can't record any global registers. Nor should we record any register that is in a small class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.
We handle SUBREG of a REG specially because the underlying reg changes its hash value with every value change; we don't want to have to forget unrelated subregs when one subreg changes.
This is like the general case, except that it only counts the integers representing the constant.
Assume there is only one rtx object for any given label.
We don't hash on the address of the CODE_LABEL to avoid bootstrap differences and differences between each stage's debugging dumps.
Don't hash on the symbol's address to avoid bootstrap differences. Different hash values may cause expressions to be recorded in different orders and thus different registers to be used in the final assembler. This also avoids differences in the dump files between various stages.
We don't record if marked volatile or if BLKmode since we don't know the size of the move.
Now that we have already found this special case, might as well speed it up as much as possible.
A USE that mentions non-volatile memory needs special handling since the MEM may be BLKmode which normally prevents an entry from being made. Pure calls are marked by a USE which mentions BLKmode memory. See calls.c:emit_call_1.
Now that we have already found this special case, might as well speed it up as much as possible.
We don't want to take the filename and line into account.
If we are about to do the last recursive call needed at this level, change it into iteration. This function is called enough to be worth it.
Unused.
Referenced by invalidate_for_call().
rtx immed_double_const | ( | HOST_WIDE_INT | , |
HOST_WIDE_INT | , | ||
enum | machine_mode | ||
) |
rtx immed_double_int_const | ( | double_int | , |
enum | machine_mode | ||
) |
int in_sequence_p | ( | void | ) |
Return 1 if currently emitting into a sequence.
References mem_attrs::align, GET_MODE_ALIGNMENT, GET_MODE_SIZE, mem_attrs::size, and mem_attrs::size_known_p.
int inequality_comparisons_p | ( | const_rtx | ) |
void init_alias_analysis | ( | void | ) |
Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE array.
If we have memory allocated from the previous run, use it.
The basic idea is that each pass through this loop will use the "constant" information from the previous pass to propagate alias information through another level of assignments. The propagation is done on the CFG in reverse post-order, to propagate things forward as far as possible in each iteration. This could get expensive if the assignment chains are long. Maybe we should throttle the number of iterations, possibly based on the optimization level or flag_expensive_optimizations. We could propagate more information in the first pass by making use of DF_REG_DEF_COUNT to determine immediately that the alias information for a pseudo is "constant". A program with an uninitialized variable can cause an infinite loop here. Instead of doing a full dataflow analysis to detect such problems we just cap the number of iterations for the loop. The state of the arrays for the set chain in question does not matter since the program has undefined behavior.
Assume nothing will change this iteration of the loop.
We want to assign the same IDs each iteration of this loop, so start counting from one each iteration of the loop.
We're at the start of the function each iteration through the loop, so we're copying arguments.
Wipe the potential alias information clean for this pass.
Wipe the reg_seen array clean.
Initialize the alias information for this pass.
Walk the insns adding values to the new_reg_base_value array.
The prologue/epilogue insns are not threaded onto the insn chain until after reload has completed. Thus, there is no sense wasting time checking if INSN is in the prologue/epilogue until after reload has completed.
If this insn has a noalias note, process it, Otherwise, scan for sets. A simple set will have no side effects which could change the base value of any other register.
Now propagate values from new_reg_base_value to reg_base_value.
Fill in the remaining entries.
Clean up.
Referenced by memref_referenced_p(), and pre_insert_copies().
void init_alias_target | ( | void | ) |
Check whether this register can hold an incoming pointer argument. FUNCTION_ARG_REGNO_P tests outgoing register numbers, so translate if necessary due to register windows.
Initialize a full_rtx_costs structure C to the maximum cost.
References rtx_cost(), and SET.
Referenced by move2add_valid_value_p().
Initialize a full_rtx_costs structure C to zero cost.
References get_full_rtx_cost(), and SET.
Referenced by move2add_valid_value_p().
void init_emit | ( | void | ) |
Initialize data structures and variables in this file before generating rtl for each function.
Init the tables that describe all the pseudo regs.
Put copies of all the hard registers into regno_reg_rtx.
Put copies of all the virtual register rtx into regno_reg_rtx.
Indicate that the virtual registers and stack locations are all pointers.
Referenced by blocks_nreverse().
void init_emit_once | ( | void | ) |
Create some permanent unique rtl objects shared between all functions.
Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute hash tables.
Compute the word and byte modes.
Create the unique rtx's for certain rtx codes and operand values.
Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case tries to use these variables.
We store the value 1.
We store the value 1.
References gcc_unreachable, MEMMODEL_ACQ_REL, MEMMODEL_ACQUIRE, MEMMODEL_CONSUME, MEMMODEL_MASK, MEMMODEL_RELAXED, MEMMODEL_RELEASE, and MEMMODEL_SEQ_CST.
void init_emit_regs | ( | void | ) |
Initialise global register information required by all functions.
Reset register attributes
We need reg_raw_mode, so initialize the modes now.
Assign register numbers to the globally defined register rtx.
Initialize RTL for commonly used hard registers. These are copied into regno_reg_rtx as we begin to compile each function.
void init_expmed | ( | void | ) |
In expmed.c
Avoid using hard regs in ways which may be unsupported.
void init_fake_stack_mems | ( | void | ) |
Initialize some fake stack-frame MEM references for use in memory_move_secondary_cost.
void init_lower_subreg | ( | void | ) |
Do one-per-target initialisation. This involves determining which operations on the machine are profitable. If none are found, then the pass just returns when called.
void init_reg_modes_target | ( | void | ) |
Compute the table of register modes. These values are used to record death information for individual registers (as opposed to a multi-register mode). This function might be invoked more than once, if the target has support for changing register usage conventions on a per-function basis.
If we couldn't find a valid mode, just use the previous mode if it is suitable, otherwise fall back on word_mode.
void init_reg_sets | ( | void | ) |
Function called only once per target_globals to initialize the target_hard_regs structure. Once this is done, various switches may override.
First copy the register information from the initial int form into the regsets.
Note that we hard-code 32 here, not HOST_BITS_PER_INT.
Sanity check: make sure the target macros FIXED_REGISTERS and CALL_USED_REGISTERS had the right number of initializers.
void init_regs | ( | void | ) |
Finish initializing the register sets and initialize the register modes. This function might be invoked more than once, if the target has support for changing register usage conventions on a per-function basis.
This finishes what was started by init_reg_sets, but couldn't be done until after register usage was specified.
void init_rtlanal | ( | void | ) |
Initialize non_rtx_starting_operands, which is used to speed up for_each_rtx.
void init_subregs_of_mode | ( | void | ) |
Passes for keeping and updating info about modes of registers inside subregisters.
Referenced by setup_allocno_class_and_costs().
void init_varasm_once | ( | void | ) |
In varasm.c
const char* insn_file | ( | const_rtx | ) |
int insn_line | ( | const_rtx | ) |
In emit-rtl.c
void insn_locations_finalize | ( | void | ) |
At the end of emit stage, clear current location.
void insn_locations_init | ( | void | ) |
Allocate insn location datastructure.
Referenced by cheap_bb_rtx_cost_p().
bool invalid_mode_change_p | ( | unsigned | int, |
enum | reg_class | ||
) |
Referenced by scan_one_insn().
hashval_t iterative_hash_rtx | ( | const_rtx | , |
hashval_t | |||
) |
Referenced by cfg_layout_can_merge_blocks_p(), and subreg_offset_representable_p().
rtx last_call_insn | ( | void | ) |
Return the last CALL_INSN in the current list, or 0 if there is none. This routine does not look inside SEQUENCEs.
Referenced by df_word_lr_add_problem().
int low_bitmask_len | ( | enum | machine_mode, |
unsigned | HOST_WIDE_INT | ||
) |
In loop-iv.c
Generates a subreg to get the least significant part of EXPR (in mode INNER_MODE) to OUTER_MODE.
void mark_elimination | ( | int | , |
int | |||
) |
In ira.c
void mark_reg_pointer | ( | rtx | , |
int | |||
) |
void mark_user_reg | ( | rtx | ) |
int max_label_num | ( | void | ) |
Return 1 + the largest label number used so far in the current function.
Referenced by update_alignments().
int max_reg_num | ( | void | ) |
In emit-rtl.c.
Return 1 plus largest pseudo reg number used in the current function.
Referenced by calculate_allocation_cost(), coalescable_pseudo_p(), coalesced_pseudo_reg_slot_compare(), df_scan_add_problem(), emit_add2_insn(), cost_classes_hasher::equal(), finish_live_range_start_chains(), lra_debug_live_range_list(), setup_allocno_class_and_costs(), split_live_ranges_for_shrink_wrap(), and update_lives().
int may_trap_or_fault_p | ( | const_rtx | ) |
Referenced by df_simulate_one_insn_forwards().
int may_trap_p | ( | const_rtx | ) |
Referenced by calculate_bb_reg_pressure(), and remove_unreachable_eh_regions().
int may_trap_p_1 | ( | const_rtx | , |
unsigned | |||
) |
void maybe_set_first_label_num | ( | rtx | ) |
Referenced by bb_has_abnormal_call_pred(), and expand_copysign_bit().
Referenced by count_reg_usage(), cselib_record_sets(), expand_copysign_bit(), and record_reg_saved_in_reg().
rtx move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len, unsigned int align, int endp)
In expr.c
Generate several move instructions to copy LEN bytes from block FROM to block TO. (These are MEM rtx's with BLKmode).
If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is used to push FROM to the stack.
ALIGN is maximum stack alignment we can assume.
If ENDP is 0 return to, if ENDP is 1 return memory at the end ala mempcpy, and if ENDP is 2 return memory the end minus one byte ala stpcpy.
If copying requires more than two move insns, copy addresses to registers (to make displacements shorter) and use post-increment if available.
Find the mode of the largest move... MODE might not be used depending on the definitions of the USE_* macros below.
First move what we can in the largest integer mode, then go to successively smaller modes.
The code above should have handled everything.
References move_by_pieces_d::autinc_from, copy_to_mode_reg(), move_by_pieces_d::explicit_inc_from, move_by_pieces_d::from_addr, and plus_constant().
Referenced by fixup_args_size_notes(), and move_by_pieces_1().
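A hedged usage sketch, assuming TO and FROM are BLKmode MEMs whose alignment is recorded in MEM_ALIGN: copy LEN bytes and return nothing (ENDP == 0). The wrapper name is invented for the example.

  /* Illustrative block copy by individual move insns.  */
  static void
  copy_block_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len)
  {
    unsigned int align = MIN (MEM_ALIGN (to), MEM_ALIGN (from));

    move_by_pieces (to, from, len, align, 0);
  }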
int multiple_sets | ( | const_rtx | ) |
Referenced by dead_or_set_regno_p(), and process_bb_node_lives().
Referenced by validate_replace_src_1().
Referenced by simplify_relational_operation_1().
unsigned HOST_WIDE_INT nonzero_bits | ( | const_rtx | , |
enum | machine_mode | ||
) |
Referenced by cached_num_sign_bit_copies(), make_extraction(), simplify_relational_operation_1(), and simplify_set().
int noop_move_p | ( | const_rtx | ) |
Call FUN on each register or MEM that is stored into or clobbered by X. (X would be the pattern of an insn). DATA is an arbitrary pointer, ignored by note_stores, but passed to FUN.
FUN receives three arguments:
If the item being stored in or clobbered is a SUBREG of a hard register, the SUBREG will be passed.
If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions, each of whose first operand is a register.
References GET_CODE, MEM_P, SET_DEST, SET_SRC, and XEXP.
Referenced by df_simulate_one_insn_forwards(), expand_copysign(), get_stored_val(), mark_nonreg_stores_1(), memref_used_between_p(), notice_stack_pointer_modification(), reg_overlap_mentioned_p(), save_call_clobbered_regs(), set_paradoxical_subreg(), and spill_hard_reg().
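A hedged sketch of a note_stores callback; following the (truncated) description above, the callback is assumed to receive the stored-to expression (possibly a SUBREG of a hard register), the SET or CLOBBER rtx, and the opaque DATA pointer.

  /* Record in *(bool *) DATA whether any hard register is stored to.  */
  static void
  note_hard_reg_store (rtx dest, const_rtx set ATTRIBUTE_UNUSED, void *data)
  {
    bool *found = (bool *) data;

    if (GET_CODE (dest) == SUBREG)
      dest = SUBREG_REG (dest);
    if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
      *found = true;
  }

  /* Usage: bool found = false;
     note_stores (PATTERN (insn), note_hard_reg_store, &found);  */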
Like note_stores, but call FUN for each expression that is being referenced in PBODY, a pointer to the PATTERN of an insn. We only call FUN for each expression, not any interior subexpressions. FUN receives a pointer to the expression and the DATA passed to this function.
Note that this is not quite the same test as that done in reg_referenced_p since that considers something as being referenced if it is being partially set, while we do not.
For sets we replace everything in source plus registers in memory expression in store and operands of a ZERO_EXTRACT.
All the other possibilities never store.
References dead_or_set_regno_p(), END_REGNO, gcc_assert, GET_CODE, REG_P, and REGNO.
Referenced by find_implicit_sets().
unsigned int num_sign_bit_copies | ( | const_rtx | , |
enum | machine_mode | ||
) |
Referenced by simplify_relational_operation_1().
bool offset_within_block_p | ( | const_rtx | , |
HOST_WIDE_INT | |||
) |
int only_sets_cc0_p | ( | const_rtx | ) |
int onlyjump_p | ( | const_rtx | ) |
In emit-rtl.c
bool optimize_insn_for_size_p | ( | void | ) |
Return TRUE when BB should be optimized for size.
References optimize_loop_for_speed_p().
bool optimize_insn_for_speed_p | ( | void | ) |
Return TRUE when BB should be optimized for speed.
References loop::inner, loop::next, and optimize_loop_for_speed_p().
Referenced by emit_cstore(), expand_mult(), expand_mult_highpart_adjust(), expand_widening_mult(), and no_conflict_move_test().
rtx plus_constant | ( | enum | machine_mode, |
rtx | , | ||
HOST_WIDE_INT | |||
) |
void pop_topmost_sequence | ( | void | ) |
After emitting to the outer-level insn chain, update the outer-level insn chain, and restore the previous saved state.
void print_inline_rtx | ( | FILE * | , |
const_rtx | , | ||
int | |||
) |
void print_insn | ( | pretty_printer * | , |
const_rtx | , | ||
int | |||
) |
void print_mem_expr | ( | FILE * | , |
const_tree | |||
) |
void print_pattern | ( | pretty_printer * | , |
const_rtx | , | ||
int | |||
) |
Referenced by print_insn(), and rtl_dump_bb_for_graph().
void print_rtl | ( | FILE * | , |
const_rtx | |||
) |
int print_rtl_single | ( | FILE * | , |
const_rtx | |||
) |
int print_rtl_single_with_indent | ( | FILE * | , |
const_rtx | , | ||
int | |||
) |
void print_simple_rtl | ( | FILE * | , |
const_rtx | |||
) |
void print_value | ( | pretty_printer * | , |
const_rtx | , | ||
int | |||
) |
Referenced by print_value().
int prologue_epilogue_contains | ( | const_rtx | ) |
void push_to_sequence | ( | rtx | ) |
void push_topmost_sequence | ( | void | ) |
Set up the outer-level insn chain as the current sequence, saving the previously current one.
In read-rtl.c
void rebuild_jump_labels | ( | rtx | ) |
void rebuild_jump_labels_chain | ( | rtx | ) |
void record_hard_reg_uses | ( | rtx * | , |
void * | |||
) |
Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with NLABEL in JUMP. If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref count has dropped to zero.
A negative DELETE_UNUSED value used to be used to signal special behavior when moving the FUNCTION_END note. Just sanity-check that no user still relies on this.
Update labels in any REG_EQUAL note.
Undefined labels will remain outside the insn stream.
Referenced by redirect_exp_1().
Return nonzero if register in range [REGNO, ENDREGNO) appears either explicitly or implicitly in X other than being stored into.
References contained within the substructure at LOC do not count. LOC may be zero, meaning don't ignore anything.
The contents of a REG_NONNEG note is always zero, so we must come here upon repeat in case the last REG_NOTE is a REG_NONNEG note.
If we are modifying the stack, frame, or argument pointer, it will clobber a virtual register. In fact, we could be more precise, but it isn't worth it.
If this is a SUBREG of a hard reg, we can see exactly which registers are being modified. Otherwise, handle normally.
Note setting a SUBREG counts as referring to the REG it is in for a pseudo but not for hard registers since we can treat each word individually.
X does not match, so try its subexpressions.
Referenced by use_crosses_set_p().
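The description above matches refers_to_regno_p, declared in this file; a hedged sketch of a simple single-register query follows (the wrapper name is invented for the example).

  /* Nonzero if hard register REGNO is referenced (other than being
     stored into) anywhere in INSN's pattern.  LOC is NULL, so nothing
     is excluded from the search.  */
  static int
  regno_referenced_in_insn_p (unsigned int regno, rtx insn)
  {
    return refers_to_regno_p (regno, regno + 1, PATTERN (insn), NULL);
  }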
enum reg_class reg_allocno_class | ( | int | ) |
enum reg_class reg_alternate_class | ( | int | ) |
int reg_class_subset_p | ( | reg_class_t | , |
reg_class_t | |||
) |
int reg_classes_intersect_p | ( | reg_class_t | , |
reg_class_t | |||
) |
In reginfo.c
unsigned int reg_or_subregno | ( | const_rtx | ) |
enum reg_class reg_preferred_class | ( | int | ) |
Referenced by cse_prescan_path(), get_last_value_validate(), and mark_used_regs_combine().
void reg_scan | ( | rtx | , |
unsigned | int | ||
) |
Referenced by set_nonzero_bits_and_sign_copies().
Referenced by check_for_label_ref().
Referenced by expand_copysign_bit().
void regclass | ( | rtx | , |
int | |||
) |
void reinit_regs | ( | void | ) |
The same as previous function plus initializing IRA.
caller_save needs to be re-initialized.
void remove_insn | ( | rtx | ) |
void remove_reg_equal_equiv_notes | ( | rtx | ) |
void remove_reg_equal_equiv_notes_for_regno | ( | unsigned | int | ) |
int replace_label | ( | rtx * | , |
void * | |||
) |
Referenced by block_has_preserve_label().
void reposition_prologue_and_epilogue_notes | ( | void | ) |
In function.c
Reposition the prologue-end and epilogue-begin notes after instruction scheduling.
void reset_used_flags | ( | rtx | ) |
bool resize_reg_info | ( | void | ) |
In reginfo.c. Resize reg info. The new elements will be initialized. Return TRUE if new pseudos were added since the last call.
Referenced by split_live_ranges_for_shrink_wrap().
int returnjump_p | ( | rtx | ) |
enum rtx_code reverse_condition_maybe_unordered | ( | enum | rtx_code | ) |
enum rtx_code reversed_comparison_code_parts | ( | enum rtx_code | code, |
const_rtx | arg0, | ||
const_rtx | arg1, | ||
const_rtx | insn | ||
) |
Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code of reversed comparison if it is possible to do so. Otherwise return UNKNOWN. UNKNOWN may be returned when we have a CC_MODE comparison and we don't know whether its source is a floating-point or an integer comparison. The machine description should define the REVERSIBLE_CC_MODE and REVERSE_CONDITION macros to help this function avoid overhead in these cases.
If this is not actually a comparison, we can't reverse it.
First see if machine description supplies us way to reverse the comparison. Give it priority over everything else to allow machine description to do tricks.
Try a few special cases based on the comparison code.
It is always safe to reverse EQ and NE, even for floating point. Similarly, the unsigned comparisons are never used for floating point, so we can reverse them in the default way.
If we already see an unordered comparison, we can be sure we are dealing with floating point, so we don't need any more tests.
We don't have a safe way to reverse these yet.
Try to search for the comparison to determine the real mode. This code is expensive, but with a sane machine description it will never be used, since REVERSIBLE_CC_MODE will return true in all cases.
These CONST_CAST's are okay because prev_nonnote_insn just returns its argument and we assign it to a const_rtx variable.
We can get past reg-reg moves. This may be useful for the i387 model of comparisons, which first moves flag registers around.
If the register is clobbered in some way we cannot understand, give up.
Test for an integer condition, or a floating-point comparison in which NaNs can be ignored.
Referenced by emit_cmp_and_jump_insn_1(), and subst().
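As a rough GCC-internal sketch of the call pattern (the helper name try_reverse_condition is hypothetical; the usual in-tree headers, including rtl.h, are assumed to be in scope):

    /* Return the reversed code for comparison rtx COND found in INSN,
       or UNKNOWN if it cannot be reversed safely.  */
    static enum rtx_code
    try_reverse_condition (rtx cond, rtx insn)
    {
      return reversed_comparison_code_parts (GET_CODE (cond),
                                             XEXP (cond, 0), XEXP (cond, 1),
                                             insn);
    }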
inline static
Force the REGNO macro to only be used on the lhs.
void rtl_dump_bb_for_graph | ( | pretty_printer * | , |
basic_block | |||
) |
Referenced by rtl_block_ends_with_condjump_p().
rtvec rtvec_alloc | ( | int | ) |
int rtx_addr_can_trap_p | ( | const_rtx | ) |
Referenced by emit_cstore(), find_first_parameter_load(), init_costs_to_max(), and no_conflict_move_test().
int rtx_equal_p_cb | ( | const_rtx | , |
const_rtx | , | ||
rtx_equal_p_callback_function | |||
) |
Referenced by reset_debug_uses_in_loop().
double_int rtx_to_double_int | ( | const_rtx | ) |
int rtx_to_tree_code | ( | enum | rtx_code | ) |
Translates rtx code to tree code, for those codes needed by REAL_ARITHMETIC. The function returns an int because the caller may not know what `enum tree_code' means.
int rtx_unstable_p | ( | const_rtx | ) |
Referenced by rtx_unstable_p().
Referenced by rtx_varies_p().
void save_register_info | ( | void | ) |
Save the register information.
Sanity check: make sure the target macros FIXED_REGISTERS and CALL_USED_REGISTERS had the right number of initializers.
Likewise for call_really_used_regs.
And similarly for reg_names.
References accessible_reg_set, call_really_used_regs, call_used_regs, COPY_HARD_REG_SET, fixed_regs, operand_reg_set, and reg_names.
void schedule_ebbs | ( | void | ) |
In sched-ebb.c.
void schedule_insns | ( | void | ) |
In sched-rgn.c.
void sel_sched_fix_param | ( | const char * | param, |
const char * | val | ||
) |
In sel-sched-dump.c.
void set_curr_insn_location | ( | location_t | ) |
void set_insn_deleted | ( | rtx | ) |
int set_noop_p | ( | const_rtx | ) |
Referenced by cse_prescan_path(), and multiple_sets().
void set_return_jump_label | ( | rtx | ) |
inline static
Return the cost of SET X. SPEED_P is true if optimizing for speed rather than size.
Referenced by find_defs(), and get_loop_level().
inline static
Return the cost of moving X into a register, relative to the cost of a register move. SPEED_P is true if optimizing for speed rather than size.
Referenced by alloc_use_cost_map(), init_expmed_one_conv(), reload_combine_closest_single_use(), and simplify_set().
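A minimal sketch of how the cost helper is typically used (cheaper_src_p is a hypothetical helper; the SPEED_P flag would normally come from the surrounding optimization context):

    /* Return true if source expression XS is cheaper than YS under the
       given speed/size preference.  */
    static bool
    cheaper_src_p (rtx xs, rtx ys, bool speed_p)
    {
      return set_src_cost (xs, speed_p) < set_src_cost (ys, speed_p);
    }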
In emit-rtl.c
void set_used_flags | ( | rtx | ) |
int sets_cc0_p | ( | const_rtx | ) |
Referenced by reload_combine_recognize_const_pattern().
void setup_reg_classes | ( | int | regno, |
enum reg_class | prefclass, | ||
enum reg_class | altclass, | ||
enum reg_class | allocnoclass | ||
) |
Set up preferred, alternate, and allocno classes for REGNO as PREFCLASS, ALTCLASS, and ALLOCNOCLASS.
int sibcall_epilogue_contains | ( | const_rtx | ) |
int side_effects_p | ( | const_rtx | ) |
enum rtx_code signed_condition | ( | enum | rtx_code | ) |
int simplejump_p | ( | const_rtx | ) |
Simplify a binary operation CODE with result mode MODE, operating on OP0 and OP1. Return 0 if no simplification is possible.
Don't use this for relational operations such as EQ or LT. Use simplify_relational_operation instead.
Relational operations don't work here. We must know the mode of the operands in order to do the comparison correctly. Assuming a full word can give incorrect results. Consider comparing 128 with -128 in QImode.
Make sure the constant is second.
Referenced by simplify_relational_operation_1().
Inf + -Inf = NaN plus exception.
Inf - Inf = NaN plus exception.
Inf / Inf = NaN plus exception.
Inf * 0 = NaN plus exception.
Don't constant fold this floating point operation if the result has overflowed and flag_trapping_math is set.
Overflow plus exception.
Don't constant fold this floating point operation if the result may depend upon the run-time rounding mode and flag_rounding_math is set, or if GCC's software emulation is unable to accurately represent the result.
We can fold some multi-word operations.
A - B == A + (-B).
Fall through....
Get the integer argument values in two forms: zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.
Compute the value of the arithmetic.
Truncate the shift if SHIFT_COUNT_TRUNCATED, otherwise make sure the value is in range. We can't return any old value for out-of-range arguments because either the middle-end (via shift_truncation_mask) or the back-end might be relying on target-specific knowledge. Nor can we rely on shift_truncation_mask, since the shift might not be part of an ashlM3, lshrM3 or ashrM3 instruction.
Sign-extend the result for arithmetic right shifts.
Do nothing here.
??? There are simplifications that can be done.
References double_int::alshift(), GET_MODE_PRECISION, HOST_WIDE_INT, double_int::lrotate(), double_int::rrotate(), double_int::rshift(), and SHIFT_COUNT_TRUNCATED.
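A small sketch of constant folding through simplify_binary_operation (fold_const_plus is a hypothetical helper):

    static rtx
    fold_const_plus (void)
    {
      /* Yields (const_int 5); NULL_RTX would mean no folding rule matched.  */
      return simplify_binary_operation (PLUS, SImode,
                                        GEN_INT (2), GEN_INT (3));
    }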
rtx simplify_const_relational_operation | ( | enum rtx_code | code, |
enum machine_mode | mode, | ||
rtx | op0, | ||
rtx | op1 | ||
) |
Check if the given comparison (done in the given MODE) is actually a tautology or a contradiction. If no simplification is possible, this function returns zero. Otherwise, it returns either const_true_rtx or const0_rtx.
If op0 is a compare, extract the comparison arguments from it.
We can't simplify MODE_CC values since we don't know what the actual comparison is.
Make sure the constant is second.
For integer comparisons of A and B maybe we can simplify A - B and can then simplify a comparison of that with zero. If A and B are both either a register or a CONST_INT, this can't help; testing for these cases will prevent infinite recursion here and speed things up. We can only do this for EQ and NE comparisons as otherwise we may lose or introduce overflow which we cannot disregard as undefined as we do not know the signedness of the operation on either the left or the right hand side of the comparison.
We cannot do this if tem is a nonzero address.
For modes without NaNs, if the two operands are equal, we know the result except if they have side-effects. Even with NaNs we know the result of unordered comparisons and, if signaling NaNs are irrelevant, also the result of LT/GT/LTGT.
If the operands are floating-point constants, see if we can fold the result.
Comparisons are unordered iff at least one of the values is NaN.
Otherwise, see if the operands are both integers.
Get the two words comprising each integer constant.
If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT, we have to sign or zero-extend the values.
Optimize comparisons with upper and lower bounds.
Get a reduced range if the sign bit is zero.
x >= y is always true for y <= mmin, always false for y > mmax.
x <= y is always true for y >= mmax, always false for y < mmin.
x == y is always false for y out of range.
x > y is always false for y >= mmax, always true for y < mmin.
x < y is always false for y <= mmin, always true for y > mmax.
x != y is always true for y out of range.
Optimize integer comparisons with zero.
Some addresses are known to be nonzero. We don't know their sign, but equality comparisons are known.
See if the first operand is an IOR with a constant. If so, we may be able to determine the result of this comparison.
Optimize comparison of ABS with zero.
Optimize abs(x) < 0.0.
Optimize abs(x) >= 0.0.
Optimize ! (abs(x) < 0.0).
Referenced by simplify_relational_operation_1().
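An illustrative sketch (const_cmp_result is hypothetical); the return value is const_true_rtx, const0_rtx, or NULL_RTX when nothing can be decided:

    /* Decide (lt:SI (const_int 3) (const_int 7)) at compile time.  */
    static rtx
    const_cmp_result (void)
    {
      return simplify_const_relational_operation (LT, SImode,
                                                  GEN_INT (3), GEN_INT (7));
    }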
rtx simplify_const_unary_operation | ( | enum rtx_code | code, |
enum machine_mode | mode, | ||
rtx | op, | ||
enum machine_mode | op_mode | ||
) |
Try to compute the value of a unary operation CODE whose output mode is to be MODE with input operand OP whose mode was originally OP_MODE. Return zero if the value cannot be computed.
The order of these tests is critical so that, for example, we don't check the wrong mode (input vs. output) for a conversion operation, such as FIX. At some point, this should be simplified.
We should never get a negative number.
Even if the value at zero is undefined, we have to come up with some replacement. Seems good enough.
When zero-extending a CONST_INT, we need to know its original mode.
If we were really extending the mode, we would have to distinguish between zero-extension and sign-extension.
If we were really extending the mode, we would have to distinguish between zero-extension and sign-extension.
We can do some operations on integer CONST_DOUBLEs. Also allow for a DImode operation on a CONST_INT.
This is just a change-of-mode, so do nothing.
All this does is change the mode, unless changing mode class.
Although the overflow semantics of RTL's FIX and UNSIGNED_FIX operators are intentionally left unspecified (to ease implementation by target backends), for consistency, this routine implements the same semantics for constant folding as used by the middle-end.
This was formerly used only for non-IEEE float. eggert@twinsun.com says it is safe for IEEE also.
Test against the signed upper bound.
Test against the signed lower bound.
Test against the unsigned upper bound.
References CLZ_DEFINED_VALUE_AT_ZERO, CTZ_DEFINED_VALUE_AT_ZERO, ctz_hwi(), ffs_hwi(), floor_log2(), gcc_assert, gcc_unreachable, GET_MODE_BITSIZE, GET_MODE_MASK, GET_MODE_PRECISION, HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT, and val_signbit_known_set_p().
Make a binary operation by properly ordering the operands and seeing if the expression folds.
If this simplifies, do it.
Put complex operands first and constants second if commutative.
Referenced by iv_mult(), make_extraction(), noce_try_sign_mask(), simplify_and_const_int_1(), simplify_byte_swapping_operation(), simplify_truncation(), and simplify_unary_operation_1().
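The comments above describe simplify_gen_binary; a hedged sketch of its use (make_reg_plus_4 and REGNO_X are hypothetical):

    /* Build REGNO_X + 4 in Pmode; constant operands are folded and the
       operands are put into canonical order automatically.  */
    static rtx
    make_reg_plus_4 (unsigned int regno_x)
    {
      rtx reg = gen_rtx_REG (Pmode, regno_x);
      return simplify_gen_binary (PLUS, Pmode, reg, GEN_INT (4));
    }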
rtx simplify_gen_relational | ( | enum rtx_code | code, |
enum machine_mode | mode, | ||
enum machine_mode | cmp_mode, | ||
rtx | op0, | ||
rtx | op1 | ||
) |
Likewise, for relational operations. CMP_MODE specifies the mode in which the comparison is done.
Referenced by subst().
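A short sketch of this gen_ form (gen_eq_si is hypothetical); constant operands fold to const_true_rtx or const0_rtx, otherwise an EQ rtx is built:

    /* Build (eq:SI X Y), with the comparison itself done in SImode.  */
    static rtx
    gen_eq_si (rtx x, rtx y)
    {
      return simplify_gen_relational (EQ, SImode, SImode, x, y);
    }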
rtx simplify_gen_subreg | ( | enum machine_mode | outermode, |
rtx | op, | ||
enum machine_mode | innermode, | ||
unsigned int | byte | ||
) |
Make a SUBREG operation or equivalent if it folds.
Referenced by clear_storage_libcall_fn(), expand_debug_parm_decl(), set_storage_via_libcall(), and set_storage_via_setmem().
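A hedged sketch of extracting the low SImode part of a DImode value (low_si_part is hypothetical):

    /* Return DI_VAL's low SImode part, either folded or as an explicit
       (subreg:SI ... ) expression.  */
    static rtx
    low_si_part (rtx di_val)
    {
      return simplify_gen_subreg (SImode, di_val, DImode,
                                  subreg_lowpart_offset (SImode, DImode));
    }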
rtx simplify_gen_ternary | ( | enum rtx_code | code, |
enum machine_mode | mode, | ||
enum machine_mode | op0_mode, | ||
rtx | op0, | ||
rtx | op1, | ||
rtx | op2 | ||
) |
Likewise for ternary operations.
If this simplifies, use it.
rtx simplify_gen_unary | ( | enum rtx_code | code, |
enum machine_mode | mode, | ||
rtx | op, | ||
enum machine_mode | op_mode | ||
) |
Make a unary operation by first seeing if it folds and otherwise making the specified operation.
If this simplifies, use it.
Referenced by expand_debug_parm_decl(), iv_extend(), make_compound_operation(), make_extraction(), simplify_byte_swapping_operation(), simplify_replace_rtx(), simplify_truncation(), simplify_unary_operation_1(), simplify_while_replacing(), split_iv(), and subst().
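A one-line sketch of the unary gen_ form (negate_value is hypothetical); a constant operand folds, anything else becomes an explicit NEG rtx:

    static rtx
    negate_value (rtx op, enum machine_mode mode)
    {
      return simplify_gen_unary (NEG, mode, op, mode);
    }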
rtx simplify_relational_operation | ( | enum rtx_code | code, |
enum machine_mode | mode, | ||
enum machine_mode | cmp_mode, | ||
rtx | op0, | ||
rtx | op1 | ||
) |
Like simplify_binary_operation except used for relational operators. MODE is the mode of the result. If MODE is VOIDmode, both operands must not also be VOIDmode.
CMP_MODE specifies the mode in which the comparison is done, so it is the mode of the operands. If CMP_MODE is VOIDmode, it is taken from the operands or, if both are VOIDmode, the operands are compared in "infinite precision".
For the following tests, ensure const0_rtx is op1.
If op0 is a compare, extract the comparison arguments from it.
rtx simplify_replace_fn_rtx | ( | rtx | x, |
const_rtx | old_rtx, | ||
rtx(*)(rtx, const_rtx, void *) | fn, | ||
void * | data | ||
) |
If FN is NULL, replace all occurrences of OLD_RTX in X with copy_rtx (DATA) and simplify the result. If FN is non-NULL, call this callback on each X, if it returns non-NULL, replace X with its return value and simplify the result.
(lo_sum (high x) x) -> x
Referenced by output_constant_def_contents(), and simplify_if_then_else().
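A sketch of the simple FN == NULL form described above (replace_and_simplify is hypothetical):

    /* Replace every occurrence of OLD_RTX in X by a copy of NEW_RTX and
       resimplify the result.  */
    static rtx
    replace_and_simplify (rtx x, rtx old_rtx, rtx new_rtx)
    {
      return simplify_replace_fn_rtx (x, old_rtx, NULL, new_rtx);
    }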
rtx simplify_subreg | ( | enum machine_mode | outermode, |
rtx | op, | ||
enum machine_mode | innermode, | ||
unsigned int | byte | ||
) |
Simplify SUBREG:OUTERMODE(OP:INNERMODE, BYTE) Return 0 if no simplifications are possible.
Little bit of sanity checking.
Changing mode twice with SUBREG => just change it once, or not at all if changing back op starting mode.
The SUBREG_BYTE represents the offset, as if the value were stored in memory. The irritating exception is a paradoxical subreg, where we define SUBREG_BYTE to be 0; on big-endian machines this value should be negative. For a moment, undo this exception.
See whether resulting subreg will be paradoxical.
In nonparadoxical subregs we can't handle negative offsets.
Bail out in case resulting subreg would be incorrect.
In a paradoxical subreg, see if we are still looking at the lower part. If so, our SUBREG_BYTE will be 0.
Recurse for further possible simplifications.
SUBREG of a hard register => just change the register number and/or mode. If the hard register is not valid in that mode, suppress this simplification. If the hard register is the stack, frame, or argument pointer, leave this as a SUBREG.
Adjust offset for paradoxical subregs.
Propagate the original regno. We don't have any way to specify the offset inside the original regno, so do this only for the lowpart. The information is used only by alias analysis, which cannot grok a partial register anyway.
If we have a SUBREG of a register that we are replacing and we are replacing it with a MEM, make a new MEM and try replacing the SUBREG with it. Don't do this if the MEM has a mode-dependent address or if we would be widening it.
Allow splitting of volatile memory references in case we don't have an instruction to move the whole thing.
Handle complex values represented as CONCAT of real and imaginary part.
A SUBREG resulting from a zero extension may fold to zero if it extracts higher bits than the ZERO_EXTEND's source bits.
Referenced by add_stores(), extract_split_bit_field(), make_extraction(), and set_storage_via_setmem().
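A hedged sketch (fold_lowpart_of_di_reg and R are hypothetical):

    /* Try to fold the SImode lowpart subreg of hard register R taken in
       DImode; NULL_RTX means no simplification applies.  */
    static rtx
    fold_lowpart_of_di_reg (unsigned int r)
    {
      rtx di_reg = gen_rtx_REG (DImode, r);
      return simplify_subreg (SImode, di_reg, DImode,
                              subreg_lowpart_offset (SImode, DImode));
    }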
int simplify_subreg_regno | ( | unsigned int | xregno, |
enum machine_mode | xmode, | ||
unsigned int | offset, | ||
enum machine_mode | ymode | ||
) |
Return the number of a YMODE register to which
(subreg:YMODE (reg:XMODE XREGNO) OFFSET)
can be simplified. Return -1 if the subreg can't be simplified.
XREGNO is a hard register number.
We shouldn't simplify stack-related registers.
We should convert the hard stack register in LRA if it is possible.
Try to get the register offset.
Make sure that the offsetted register value is in range.
See whether (reg:YMODE YREGNO) is valid. ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid. This is a kludge to work around how complex FP arguments are passed on IA-64 and should be fixed. See PR target/49226.
Referenced by resolve_reg_notes().
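A sketch of the typical query (lowpart_si_regno is hypothetical):

    /* Which hard register holds the SImode lowpart of (reg:DI XREGNO)?
       Returns -1 if the subreg cannot be reduced to a plain register.  */
    static int
    lowpart_si_regno (unsigned int xregno)
    {
      return simplify_subreg_regno (xregno, DImode,
                                    subreg_lowpart_offset (SImode, DImode),
                                    SImode);
    }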
rtx simplify_ternary_operation | ( | enum rtx_code | code, |
enum machine_mode | mode, | ||
enum machine_mode | op0_mode, | ||
rtx | op0, | ||
rtx | op1, | ||
rtx | op2 | ||
) |
Simplify CODE, an operation with result mode MODE and three operands, OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became a constant. Return 0 if no simplification is possible.
VOIDmode means "infinite" precision.
Simplify negations around the multiplication.
-a * -b + c => a * b + c.
Canonicalize the two multiplication operands.
a * -b + c => -b * a + c.
Extracting a bit-field from a constant
First zero-extend.
If desired, propagate sign bit.
Convert c ? a : a into "a".
Convert a != b ? a : b into "a".
Convert a == b ? a : b into "b".
Look for happy constants in op1 and op2.
See if any simplifications were possible.
Replace (vec_merge (vec_merge a b m) c n) with (vec_merge b c n) if no element from a appears in the result.
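A small sketch of the IF_THEN_ELSE case mentioned above (fold_select is hypothetical; OP0 must already be a constant condition for this fold to apply):

    /* Fold (if_then_else:SI (const_int 1) A B) to A.  */
    static rtx
    fold_select (rtx a, rtx b)
    {
      return simplify_ternary_operation (IF_THEN_ELSE, SImode, SImode,
                                         const_true_rtx, a, b);
    }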
rtx simplify_unary_operation | ( | enum rtx_code | code, |
enum machine_mode | mode, | ||
rtx | op, | ||
enum machine_mode | op_mode | ||
) |
Try to simplify a unary operation CODE whose output mode is to be MODE with input operand OP whose mode was originally OP_MODE. Return zero if no simplification can be made.
Referenced by convert_memory_address_addr_space(), find_comparison_args(), and may_trap_p().
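A sketch of constant folding through this routine (fold_neg_const is hypothetical):

    /* Fold (neg:SI (const_int 5)) to (const_int -5).  */
    static rtx
    fold_neg_const (void)
    {
      return simplify_unary_operation (NEG, SImode, GEN_INT (5), SImode);
    }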
void split_all_insns | ( | void | ) |
Split all insns in the function. If UPD_LIFE, update life info after.
Can't use `next_real_insn' because that might go across CODE_LABELS and short-out basic blocks.
Don't split no-op move insns. These should silently disappear later in final. Splitting such insns would break the code that handles LIBCALL blocks.
Nops get in the way while scheduling, so delete them now if register allocation has already been done. It is too risky to try to do this before register allocation, and there are unlikely to be very many nops then anyway.
References DF_INSN_DEFS, DF_REF_REGNO, fixed_regs, gcc_assert, global_regs, HARD_REG_SET, hard_regno_nregs, live, NULL_RTX, reg_alloc_order, REG_SET_TO_HARD_REG_SET, and SET_HARD_REG_BIT.
unsigned int split_all_insns_noflow | ( | void | ) |
Same as split_all_insns, but do not expect CFG to be available. Used by machine dependent reorg passes.
Don't split no-op move insns. These should silently disappear later in final. Splitting such insns would break the code that handles LIBCALL blocks.
Nops get in the way while scheduling, so delete them now if register allocation has already been done. It is too risky to try to do this before register allocation, and there are unlikely to be very many nops then anyway. ??? Should we use delete_insn when the CFG isn't valid?
void start_sequence | ( | void | ) |
Begin emitting insns to a sequence. If this sequence will contain something that might cause the compiler to pop arguments to function calls (because those pops have previously been deferred; see INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust before calling this function. That will ensure that the deferred pops are not accidentally emitted in the middle of this sequence.
Referenced by emit_insn_before_setloc(), end_ifcvt_sequence(), expand_abs(), expand_atomic_load(), expand_builtin_sincos(), expand_ffs(), gmalloc(), init_set_costs(), noce_emit_store_flag(), noce_try_addcc(), record_insns(), sjlj_assign_call_site_values(), split_edge_and_insert(), and split_iv().
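The usual idiom pairs start_sequence with get_insns and end_sequence; a hedged sketch (build_detached_sequence is hypothetical):

    /* Collect insns into a detached chain that can be emitted later,
       e.g. with emit_insn_before.  */
    static rtx
    build_detached_sequence (void)
    {
      rtx seq;

      start_sequence ();
      /* ... emit_insn (...), emit_move_insn (...), etc. ...  */
      seq = get_insns ();
      end_sequence ();
      return seq;
    }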
const char* str_pattern_slim | ( | const_rtx | ) |
Referenced by init_num_sign_bit_copies_in_rep().
void subreg_get_info | ( | unsigned int | xregno, |
enum machine_mode | xmode, | ||
unsigned int | offset, | ||
enum machine_mode | ymode, | ||
struct subreg_info * | info | ||
) |
Fill in information about a subreg of a hard register. xregno - A regno of an inner hard subreg_reg (or what will become one). xmode - The mode of xregno. offset - The byte offset. ymode - The mode of a top level SUBREG (or what may become one). info - Pointer to structure to fill in.
If there are holes in a non-scalar mode in registers, we expect that it is made up of its units concatenated together.
You can only ask for a SUBREG of a value with holes in the middle if you don't cross the holes. (Such a SUBREG should be done by picking a different register class, or doing it in memory if necessary.) An example of a value with holes is XCmode on 32-bit x86 with -m128bit-long-double; it's represented in 6 32-bit registers, 3 for each part, but in memory it's two 128-bit parts. Padding is assumed to be at the end (not necessarily the 'high part') of each unit.
Paradoxical subregs are otherwise valid.
If this is a big endian paradoxical subreg, which uses more actual hard registers than the original register, we must return a negative offset so that we find the proper highpart of the register.
If registers store different numbers of bits in the different modes, we cannot generally form this subreg.
Lowpart subregs are otherwise valid.
This should always pass, otherwise we don't know how to verify the constraint. These conditions may be relaxed but subreg_regno_offset would need to be redesigned.
The XMODE value can be seen as a vector of NREGS_XMODE values. The subreg must represent a lowpart of given field. Compute what field it is.
Size of ymode must not be greater than the size of xmode.
Referenced by invert_exp_1(), and invert_jump_1().
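A hedged sketch of filling in and reading a subreg_info (si_subreg_hard_regs is hypothetical):

    /* Number of hard registers covered by (subreg:SI (reg:DI XREGNO) 0),
       or -1 if that subreg is not representable as hard registers.  */
    static int
    si_subreg_hard_regs (unsigned int xregno)
    {
      struct subreg_info info;

      subreg_get_info (xregno, DImode, 0, SImode, &info);
      return info.representable_p ? info.nregs : -1;
    }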
unsigned int subreg_highpart_offset | ( | enum | machine_mode, |
enum | machine_mode | ||
) |
unsigned int subreg_lowpart_offset | ( | enum | machine_mode, |
enum | machine_mode | ||
) |
int subreg_lowpart_p | ( | const_rtx | ) |
unsigned int subreg_lsb | ( | const_rtx | ) |
unsigned int subreg_lsb_1 | ( | enum machine_mode | outer_mode, |
enum machine_mode | inner_mode, | ||
unsigned int | subreg_byte | ||
) |
Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE, and SUBREG_BYTE, return the bit offset where the subreg begins (counting from the least significant bit of the operand).
A paradoxical subreg begins at bit position 0.
If the subreg crosses a word boundary ensure that it also begins and ends on a word boundary.
unsigned int subreg_nregs | ( | const_rtx | ) |
Referenced by operands_match_p(), and update_live_status().
unsigned int subreg_nregs_with_regno | ( | unsigned | int, |
const_rtx | |||
) |
bool subreg_offset_representable_p | ( | unsigned int | xregno, |
enum machine_mode | xmode, | ||
unsigned int | offset, | ||
enum machine_mode | ymode | ||
) |
This function returns true when the offset is representable via subreg_offset in the given regno. xregno - A regno of an inner hard subreg_reg (or what will become one). xmode - The mode of xregno. offset - The byte offset. ymode - The mode of a top level SUBREG (or what may become one). RETURN - Whether the offset is representable.
References CONST_CAST_RTX, i2, keep_with_call_p(), next_nonnote_insn(), and targetm.
Referenced by validate_subreg().
unsigned int subreg_regno | ( | const_rtx | ) |
Referenced by update_live_status().
unsigned int subreg_regno_offset | ( | unsigned int | xregno, |
enum machine_mode | xmode, | ||
unsigned int | offset, | ||
enum machine_mode | ymode | ||
) |
This function returns the regno offset of a subreg expression. xregno - A regno of an inner hard subreg_reg (or what will become one). xmode - The mode of xregno. offset - The byte offset. ymode - The mode of a top level SUBREG (or what may become one). RETURN - The regno offset which would be used.
Referenced by emit_cmp_and_jump_insn_1(), num_validated_changes(), simplify_relational_operation_1(), and validate_change().
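A sketch of the usual use (subreg_hard_regno_of is hypothetical): the returned offset is added to the inner regno to obtain the hard register actually referenced.

    /* Hard register referenced by (subreg:SI (reg:DI XREGNO) BYTE).  */
    static unsigned int
    subreg_hard_regno_of (unsigned int xregno, unsigned int byte)
    {
      return xregno + subreg_regno_offset (xregno, DImode, byte, SImode);
    }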
enum rtx_code swap_condition | ( | enum | rtx_code | ) |
Referenced by compute_hash_table(), default_fixed_point_supported_p(), and merge_blocks_move_predecessor_nojumps().
int true_regnum | ( | const_rtx | ) |
HOST_WIDE_INT trunc_int_for_mode | ( | HOST_WIDE_INT | , |
enum | machine_mode | ||
) |
Generally useful functions. In explow.c
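A sketch of what trunc_int_for_mode does (canon_qi_const is hypothetical): it sign-extends a constant from the precision of the given mode, so 200 becomes -56 in QImode.

    static HOST_WIDE_INT
    canon_qi_const (HOST_WIDE_INT c)
    {
      /* Canonicalize C as a QImode CONST_INT value.  */
      return trunc_int_for_mode (c, QImode);
    }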
In emit-rtl.c
unsigned int unshare_all_rtl | ( | void | ) |
void unshare_all_rtl_again | ( | rtx | ) |
void unshare_all_rtl_in_chain | ( | rtx | ) |
enum rtx_code unsigned_condition | ( | enum | rtx_code | ) |
void update_address | ( | struct address_info * | ) |
bool val_signbit_known_clear_p | ( | enum | machine_mode, |
unsigned | HOST_WIDE_INT | ||
) |
bool val_signbit_known_set_p | ( | enum | machine_mode, |
unsigned | HOST_WIDE_INT | ||
) |
Referenced by convert_modes(), simplify_const_unary_operation(), and simplify_relational_operation_1().
bool val_signbit_p | ( | enum | machine_mode, |
unsigned | HOST_WIDE_INT | ||
) |
Referenced by emit_cstore().
bool validate_subreg | ( | enum machine_mode | omode, |
enum machine_mode | imode, | ||
const_rtx | reg, | ||
unsigned int | offset | ||
) |
We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if this construct would be valid, and false otherwise.
All subregs must be aligned.
The subreg offset cannot be outside the inner object.
??? This should not be here. Temporarily continue to allow word_mode subregs of anything. The most common offender is (subreg:SI (reg:DF)). Generally, backends are doing something sketchy but it'll take time to fix them all.
??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field is the culprit here, and not the backends.
Allow component subregs of complex and vector. Though given the below extraction rules, it's not always clear what that means.
??? x86 sse code makes heavy use of *paradoxical* vector subregs, i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to represent this. It's questionable whether this ought to be represented at all; why can't this all be hidden in post-reload splitters that make arbitrary mode changes to the registers themselves?
Subregs involving floating point modes are not allowed to change size. Therefore (subreg:DI (reg:DF) 0) is fine, but (subreg:SI (reg:DF) 0) isn't.
LRA can use a subreg to store a floating-point value in an integer mode. Although the floating-point and the integer modes need the same number of hard registers, the size of the floating-point mode can be smaller than the integer mode. LRA also uses subregs for a register that should be used in different modes in one insn.
Paradoxical subregs must have offset zero.
This is a normal subreg. Verify that the offset is representable.
For hard registers, we already have most of these rules collected in subreg_offset_representable_p.
For pseudo registers, we want most of the same checks. Namely: If the register no larger than a word, the subreg must be lowpart. If the register is larger than a word, the subreg must be the lowpart of a subword. A subreg does *not* perform arbitrary bit extraction. Given that we've already checked mode/offset alignment, we only have to check subword subregs here.
References COMPLEX_MODE_P, GET_MODE_INNER, REG_CANNOT_CHANGE_MODE_P, REGNO, subreg_offset_representable_p(), and VECTOR_MODE_P.
Referenced by extract_split_bit_field(), and vt_stack_adjustments().
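A hedged sketch of checking validity before building a subreg (si_subreg_of_di_ok is hypothetical):

    /* Would (subreg:SI REG 0), with REG in DImode, be a valid construct?  */
    static bool
    si_subreg_of_di_ok (const_rtx reg)
    {
      return validate_subreg (SImode, DImode, reg, 0);
    }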
unsigned int variable_tracking_main | ( | void | ) |
void verify_rtl_sharing | ( | void | ) |
Go through all the RTL insn bodies and check that there is no unexpected sharing in between the subexpressions.
int volatile_insn_p | ( | const_rtx | ) |
Referenced by df_simulate_one_insn_forwards(), and remove_reg_equal_equiv_notes_for_regno().
int volatile_refs_p | ( | const_rtx | ) |
rtx cc0_rtx |
Referenced by cse_prescan_path().
rtx const_int_rtx[MAX_SAVED_CONST_INT *2+1] |
We make one copy of (const_int C) where C is in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] to save space during compilation and to simplify comparisons of integers.
rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE] |
We record floating-point CONST_DOUBLEs in each floating-point mode for the values of 0, 1, and 2. For the integer entries and VOIDmode, we record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX is set only for MODE_INT and MODE_VECTOR_INT modes.
rtx const_true_rtx |
int cse_not_expected |
If this is nonzero, we do not bother generating VOLATILE around volatile memory references, and we are willing to output indirect addresses. If cse is to follow, we reject indirect addresses so a useful potential cse is generated; if it is used only once, instruction combination will produce the same indirect address eventually.
int currently_expanding_to_rtl |
Nonzero when we are expanding trees to RTL.
struct target_rtl default_target_rtl |
Emit RTL for the GCC expander. Copyright (C) 1987-2013 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see http://www.gnu.org/licenses/. Middle-to-low level generation of rtx code and insns.
This file contains support functions for creating rtl expressions and manipulating them in the doubly-linked chain of insns.
The patterns of the insns are created by machine-dependent routines in insn-emit.c, which is generated automatically from the machine description. These routines make the individual rtx's of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch], which are automatically generated from rtl.def; what is machine dependent is the kind of rtx's they make and what arguments they use.
int epilogue_completed |
Nonzero after thread_prologue_and_epilogue_insns has run.
Referenced by redirect_exp_1().
location_t epilogue_location |
struct rtl_hooks general_rtl_hooks |
... but then it has to restore these.
Generic hooks for the RTL middle-end. Copyright (C) 2004-2013 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see http://www.gnu.org/licenses/. For speed, we will copy the RTX hooks struct member-by-member instead of doing indirect calls. For this reason, we initialize two struct rtl_hooks globals: rtl_hooks is the one that is used to actually call the hooks, while general_rtl_hooks is used to restore the hooks by passes that modify them.
int generating_concat_p |
Nonzero when we are generating CONCATs.
Referenced by push_function_context(), and tree_conflicts_with_clobbers_p().
int lra_in_progress |
Set to 1 while in lra.
Referenced by address_operand(), gen_tmp_stack_mem(), and validate_simplify_insn().
const char* const note_insn_name[NOTE_INSN_MAX] |
Names for NOTE insn's other than line numbers.
Names for kinds of NOTEs and REG_NOTEs.
rtx pc_rtx |
Standard pieces of rtx, to be substituted directly into things.
Referenced by add_stack_var_conflict(), alter_reg(), dwf_regno(), gen_formal_parameter_die(), and rtx_for_function_call().
const char* print_rtx_head |
In print-rtl.c
String printed at beginning of each RTL when it is dumped. This string is set to ASM_COMMENT_START when the RTL is dumped in the assembly output file.
Referenced by debug_rtx_find().
location_t prologue_location |
const char* const reg_note_name[] |
Names for REG_NOTE's in EXPR_LIST insn's.
int reload_completed |
Nonzero after end of reload pass. Set to 1 or 0 by reload1.c.
Nonzero after end of reload pass. Set to 1 or 0 by toplev.c. Controls the significance of (SUBREG (MEM)).
Referenced by canonicalize_change_group(), cond_move_convert_if_block(), df_mark_reg(), subreg_lowpart_offset(), and validate_simplify_insn().
int reload_in_progress |
Set to 1 while reload_as_needed is operating. Required by some machines to handle any generated moves differently.
Referenced by canonicalize_change_group(), constrain_operands(), and set_storage_via_setmem().
rtx ret_rtx |
Referenced by prologue_epilogue_contains().
enum rtx_class rtx_class[NUM_RTX_CODE] |
Indexed by rtx code, gives a character representing the "class" of that rtx code. See rtl.def for documentation on the defined classes.
const unsigned char rtx_code_size[NUM_RTX_CODE] |
Indexed by rtx code, gives the size of the rtx in bytes.
const char* const rtx_format[NUM_RTX_CODE] |
Indexed by rtx code, gives a sequence of operand-types for rtx's of that code. The sequence is a C string in which each character describes one operand.
const unsigned char rtx_length[NUM_RTX_CODE] |
const char* const rtx_name[NUM_RTX_CODE] |
Referenced by change_cfi_row(), and connect_traces().
const unsigned char rtx_next[NUM_RTX_CODE] |
rtx simple_return_rtx |
Referenced by prologue_epilogue_contains().
int split_branch_probability |
Probability of the conditional branch currently being processed by try_split. Set to -1 otherwise.
rtx stack_limit_rtx |
In toplev.c
Referenced by crash_signal(), and handle_common_deferred_options().