@@ -1229,7 +1229,7 @@ store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
&& (BLOCK_REG_PADDING (args[i].mode,
- TREE_TYPE (args[i].tree_value), 1)
+ TREE_TYPE (args[i].tree_value), 1, -1)
== PAD_DOWNWARD)
#else
&& BYTES_BIG_ENDIAN
@@ -1586,7 +1586,8 @@ initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
end it should be padded. */
args[i].locate.where_pad =
BLOCK_REG_PADDING (arg.mode, type,
- int_size_in_bytes (type) <= UNITS_PER_WORD);
+ int_size_in_bytes (type) <= UNITS_PER_WORD,
+ argpos < n_named_args);
#endif
/* Update ARGS_SIZE, the total stack space for args so far. */
@@ -4432,7 +4433,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
argvec[count].locate.where_pad =
BLOCK_REG_PADDING (arg.mode, NULL_TREE,
known_le (GET_MODE_SIZE (arg.mode),
- UNITS_PER_WORD));
+ UNITS_PER_WORD), 1);
#endif
targetm.calls.function_arg_advance (args_so_far, arg);
@@ -917,7 +917,7 @@ char *aarch64_output_simd_xor_imm (rtx, unsigned);
char *aarch64_output_sve_mov_immediate (rtx);
char *aarch64_output_sve_ptrues (rtx);
-bool aarch64_pad_reg_upward (machine_mode, const_tree, bool);
+bool aarch64_pad_reg_upward (machine_mode, const_tree, bool, int);
bool aarch64_regno_ok_for_base_p (int, bool);
bool aarch64_regno_ok_for_index_p (int, bool);
bool aarch64_reinterpret_float_as_int (rtx value, unsigned HOST_WIDE_INT *fail);
@@ -7501,7 +7501,7 @@ aarch64_function_arg_padding (machine_mode mode, const_tree type)
return PAD_UPWARD;
}
-/* Similarly, for use by BLOCK_REG_PADDING (MODE, TYPE, FIRST).
+/* Similarly, for use by BLOCK_REG_PADDING (MODE, TYPE, FIRST, NAMED).
It specifies padding for the last (may also be the only)
element of a block move between registers and memory. If
@@ -7525,7 +7525,7 @@ aarch64_function_arg_padding (machine_mode mode, const_tree type)
bool
aarch64_pad_reg_upward (machine_mode mode, const_tree type,
- bool first ATTRIBUTE_UNUSED)
+ bool first ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
{
/* Aside from pure scalable types, small composite types are always
@@ -21556,7 +21556,7 @@ aarch64_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
if (BYTES_BIG_ENDIAN && ag_size < UNITS_PER_VREG)
adjust = UNITS_PER_VREG - ag_size;
}
- else if (BLOCK_REG_PADDING (mode, type, 1) == PAD_DOWNWARD
+ else if (BLOCK_REG_PADDING (mode, type, 1, 0) == PAD_DOWNWARD
&& size < UNITS_PER_VREG)
{
adjust = UNITS_PER_VREG - size;
@@ -21596,7 +21596,7 @@ aarch64_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
dw_align = true;
}
- if (BLOCK_REG_PADDING (mode, type, 1) == PAD_DOWNWARD
+ if (BLOCK_REG_PADDING (mode, type, 1, 0) == PAD_DOWNWARD
&& size < UNITS_PER_WORD)
{
adjust = UNITS_PER_WORD - size;
@@ -1127,7 +1127,7 @@ enum arm_pcs
/* We can't use machine_mode inside a generator file because it
- hasn't been created yet; we shouldn't be using any code that
+ hasn't been created yet; we shouldn't be using any code that
needs the real definition though, so this ought to be safe. */
#ifdef GENERATOR_FILE
#define MACHMODE int
@@ -1175,8 +1175,9 @@ typedef struct
} CUMULATIVE_ARGS;
#endif
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
- (aarch64_pad_reg_upward (MODE, TYPE, FIRST) ? PAD_UPWARD : PAD_DOWNWARD)
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
+ (aarch64_pad_reg_upward (MODE, TYPE, FIRST, NAMED) \
+ ? PAD_UPWARD : PAD_DOWNWARD)
#define PAD_VARARGS_DOWN 0
@@ -206,7 +206,7 @@ extern rtx arm_stack_protect_tls_canary_mem (bool);
#if defined TREE_CODE
extern void arm_init_cumulative_args (CUMULATIVE_ARGS *, tree, rtx, tree);
-extern bool arm_pad_reg_upward (machine_mode, tree, int);
+extern bool arm_pad_reg_upward (machine_mode, tree, int, int);
#endif
extern int arm_apply_result_size (void);
extern opt_machine_mode arm_get_mask_mode (machine_mode mode);
@@ -16944,14 +16944,14 @@ arm_function_arg_padding (machine_mode mode, const_tree type)
}
-/* Similarly, for use by BLOCK_REG_PADDING (MODE, TYPE, FIRST).
+/* Similarly, for use by BLOCK_REG_PADDING (MODE, TYPE, FIRST, NAMED).
Return !BYTES_BIG_ENDIAN if the least significant byte of the
register has useful data, and return the opposite if the most
significant byte does. */
bool
-arm_pad_reg_upward (machine_mode mode,
- tree type, int first ATTRIBUTE_UNUSED)
+arm_pad_reg_upward (machine_mode mode, tree type,
+ int first ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
{
if (TARGET_AAPCS_BASED && BYTES_BIG_ENDIAN)
{
@@ -1707,8 +1707,8 @@ typedef struct
} CUMULATIVE_ARGS;
#endif
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
- (arm_pad_reg_upward (MODE, TYPE, FIRST) ? PAD_UPWARD : PAD_DOWNWARD)
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
+ (arm_pad_reg_upward (MODE, TYPE, FIRST, NAMED) ? PAD_UPWARD : PAD_DOWNWARD)
/* For AAPCS, padding should never be below the argument. For other ABIs,
* mimic the default. */
@@ -25,7 +25,7 @@
#ifdef RTX_CODE
extern void c6x_init_cumulative_args (CUMULATIVE_ARGS *, const_tree, rtx, int);
-extern bool c6x_block_reg_pad_upward (machine_mode, const_tree, bool);
+extern bool c6x_block_reg_pad_upward (machine_mode, const_tree, bool, int);
extern bool c6x_legitimate_address_p_1 (machine_mode, rtx, bool, bool);
extern bool c6x_mem_operand (rtx, enum reg_class, bool);
@@ -535,12 +535,13 @@ c6x_function_arg_advance (cumulative_args_t cum_v, const function_arg_info &)
}
-/* Return true if BLOCK_REG_PADDING (MODE, TYPE, FIRST) should return
+/* Return true if BLOCK_REG_PADDING (MODE, TYPE, FIRST, NAMED) should return
upward rather than downward. */
bool
c6x_block_reg_pad_upward (machine_mode mode ATTRIBUTE_UNUSED,
- const_tree type, bool first)
+ const_tree type, bool first,
+ int named ATTRIBUTE_UNUSED)
{
HOST_WIDE_INT size;
@@ -333,8 +333,9 @@ struct c6x_args {
#define INIT_CUMULATIVE_ARGS(cum, fntype, libname, fndecl, n_named_args) \
c6x_init_cumulative_args (&cum, fntype, libname, n_named_args)
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
- (c6x_block_reg_pad_upward (MODE, TYPE, FIRST) ? PAD_UPWARD : PAD_DOWNWARD)
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
+ (c6x_block_reg_pad_upward (MODE, TYPE, FIRST, NAMED) \
+ ? PAD_UPWARD : PAD_DOWNWARD)
#define FUNCTION_ARG_REGNO_P(r) \
(((r) >= REG_A4 && (r) <= REG_A13) || ((r) >= REG_B4 && (r) <= REG_B13))
@@ -247,7 +247,7 @@ extern bool mips16_expand_copy (rtx, rtx, rtx, rtx);
extern void mips_expand_synci_loop (rtx, rtx);
extern void mips_init_cumulative_args (CUMULATIVE_ARGS *, tree);
-extern bool mips_pad_reg_upward (machine_mode, tree);
+extern bool mips_pad_reg_upward (machine_mode, tree, int, int);
extern bool mips_expand_ext_as_unaligned_load (rtx, rtx, HOST_WIDE_INT,
HOST_WIDE_INT, bool);
@@ -7437,13 +7437,20 @@ mips_function_arg_padding (machine_mode mode, const_tree type)
return PAD_DOWNWARD;
}
-/* Likewise BLOCK_REG_PADDING (MODE, TYPE, ...). Return !BYTES_BIG_ENDIAN
- if the least significant byte of the register has useful data. Return
- the opposite if the most significant byte does. */
+/* Likewise BLOCK_REG_PADDING (MODE, TYPE, FIRST, NAMED).
+ Return !BYTES_BIG_ENDIAN if the least significant byte of the register has
+ useful data. Return the opposite if the most significant byte does. */
bool
-mips_pad_reg_upward (machine_mode mode, tree type)
+mips_pad_reg_upward (machine_mode mode, tree type, int first ATTRIBUTE_UNUSED,
+ int named)
{
+  /* A _Float32 value has to be shifted into the upper 32 bits when it is
+     passed as a variadic (unnamed) register argument on big-endian NEWABI
+     targets.  */
+ if (TARGET_NEWABI && TARGET_BIG_ENDIAN && named == 0 && mode == SFmode
+ && type != 0 && FLOAT_TYPE_P (type))
+ return true;
+
/* No shifting is required for floating-point arguments. */
if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
return !BYTES_BIG_ENDIAN;
@@ -2593,8 +2593,8 @@ typedef struct mips_args {
#define INIT_CUMULATIVE_ARGS(CUM, FNTYPE, LIBNAME, INDIRECT, N_NAMED_ARGS) \
mips_init_cumulative_args (&CUM, FNTYPE)
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
- (mips_pad_reg_upward (MODE, TYPE) ? PAD_UPWARD : PAD_DOWNWARD)
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
+ (mips_pad_reg_upward (MODE, TYPE, FIRST, NAMED) ? PAD_UPWARD : PAD_DOWNWARD)
/* True if using EABI and varargs can be passed in floating-point
registers. Under these conditions, we need a more complex form
@@ -65,7 +65,7 @@ extern bool gen_ldstwm_peep (bool, int, rtx, rtx *);
extern void nios2_adjust_reg_alloc_order (void);
-extern pad_direction nios2_block_reg_padding (machine_mode, tree, int);
+extern pad_direction nios2_block_reg_padding (machine_mode, tree, int, int);
#endif /* RTX_CODE */
@@ -3459,7 +3459,7 @@ nios2_function_arg_padding (machine_mode mode, const_tree type)
pad_direction
nios2_block_reg_padding (machine_mode mode, tree type,
- int first ATTRIBUTE_UNUSED)
+ int first ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
{
return nios2_function_arg_padding (mode, type);
}
@@ -287,8 +287,8 @@ typedef struct nios2_args
#define PAD_VARARGS_DOWN \
(targetm.calls.function_arg_padding (TYPE_MODE (type), type) == PAD_DOWNWARD)
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
- (nios2_block_reg_padding ((MODE), (TYPE), (FIRST)))
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
+ (nios2_block_reg_padding ((MODE), (TYPE), (FIRST), (NAMED)))
#define FUNCTION_ARG_REGNO_P(REGNO) \
((REGNO) >= FIRST_ARG_REGNO && (REGNO) <= LAST_ARG_REGNO)
@@ -661,7 +661,7 @@ struct hppa_args {int words, nargs_prototype, incoming, indirect; };
We use a DImode register in the parallel for 5 to 7 byte structures
so that there is only one element. This allows the object to be
correctly padded. */
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
targetm.calls.function_arg_padding ((MODE), (TYPE))
@@ -246,7 +246,7 @@
/* Specify padding for the last element of a block move between
registers and memory. FIRST is nonzero if this is the only
element. */
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
(!(FIRST) ? PAD_UPWARD : targetm.calls.function_arg_padding (MODE, TYPE))
/* Indicate that jump tables go in the text section. */
@@ -441,7 +441,7 @@
/* Specify padding for the last element of a block move between
registers and memory. FIRST is nonzero if this is the only
element. */
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
(!(FIRST) ? PAD_UPWARD : targetm.calls.function_arg_padding (MODE, TYPE))
#define DOUBLE_INT_ASM_OP "\t.quad\t"
@@ -172,7 +172,7 @@ extern int dot_symbols;
/* Specify padding for the last element of a block move between
registers and memory. FIRST is nonzero if this is the only
element. */
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
(!(FIRST) ? PAD_UPWARD : targetm.calls.function_arg_padding (MODE, TYPE))
/* FreeBSD doesn't support saving and restoring 64-bit regs with a 32-bit
@@ -255,7 +255,7 @@ extern int dot_symbols;
/* Specify padding for the last element of a block move between
registers and memory. FIRST is nonzero if this is the only
element. */
-#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
+#define BLOCK_REG_PADDING(MODE, TYPE, FIRST, NAMED) \
(!(FIRST) ? PAD_UPWARD : targetm.calls.function_arg_padding (MODE, TYPE))
/* Linux doesn't support saving and restoring 64-bit regs in a 32-bit
@@ -3020,7 +3020,7 @@ emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
extract_bit_field loads to the lsb of the reg. */
if (
#ifdef BLOCK_REG_PADDING
- BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
+ BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start, -1)
== (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
BYTES_BIG_ENDIAN
@@ -3456,7 +3456,7 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
Move the fragment to the lsb if it's not already there. */
if (
#ifdef BLOCK_REG_PADDING
- BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
+ BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start, -1)
== (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
BYTES_BIG_ENDIAN
@@ -2885,7 +2885,7 @@ assign_parm_setup_block_p (struct assign_parm_data_one *data)
that are padded at the least significant end. */
if (REG_P (data->entry_parm)
&& known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
- && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
+ && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1, 1)
== (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
return true;
#endif
@@ -3020,7 +3020,7 @@ assign_parm_setup_block (struct assign_parm_data_all *all,
if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
&& (size == UNITS_PER_WORD
- || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
+ || (BLOCK_REG_PADDING (mode, data->arg.type, 1, 1)
!= (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
#endif
)
@@ -3069,7 +3069,7 @@ assign_parm_setup_block (struct assign_parm_data_all *all,
additional changes to work correctly. */
gcc_checking_assert (BYTES_BIG_ENDIAN
&& (BLOCK_REG_PADDING (mode,
- data->arg.type, 1)
+ data->arg.type, 1, 1)
== PAD_UPWARD));
int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
@@ -3090,7 +3090,7 @@ assign_parm_setup_block (struct assign_parm_data_all *all,
handle all cases (e.g. SIZE == 3). */
else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
- && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
+ && (BLOCK_REG_PADDING (mode, data->arg.type, 1, 1)
== PAD_DOWNWARD)
#else
&& BYTES_BIG_ENDIAN
@@ -3114,7 +3114,7 @@ assign_parm_setup_block (struct assign_parm_data_all *all,
gcc_checking_assert (size > UNITS_PER_WORD);
#ifdef BLOCK_REG_PADDING
gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
- data->arg.type, 0)
+ data->arg.type, 0, 1)
== PAD_UPWARD);
#endif
emit_move_insn (mem, entry_parm);