@@ -4122,8 +4122,8 @@ expand_sdiv_pow2 (scalar_int_mode mode, rtx op0, HOST_WIDE_INT d)
temp = force_reg (mode, temp);
/* Construct "temp2 = (temp2 < 0) ? temp : temp2". */
- temp2 = emit_conditional_move (temp2, LT, temp2, const0_rtx,
- mode, temp, temp2, mode, 0);
+ temp2 = emit_conditional_move (temp2, { LT, temp2, const0_rtx, mode },
+ temp, temp2, mode, 0);
if (temp2)
{
rtx_insn *seq = get_insns ();
@@ -6125,10 +6125,10 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
return 0;
if (and_them)
- tem = emit_conditional_move (target, code, op0, op1, mode,
+ tem = emit_conditional_move (target, { code, op0, op1, mode },
tem, const0_rtx, GET_MODE (tem), 0);
else
- tem = emit_conditional_move (target, code, op0, op1, mode,
+ tem = emit_conditional_move (target, { code, op0, op1, mode },
trueval, tem, GET_MODE (tem), 0);
if (tem == 0)
@@ -8840,8 +8840,9 @@ expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
op2 = gen_lowpart (mode, op2);
/* Try to emit the conditional move. */
- insn = emit_conditional_move (temp, comparison_code,
- op00, op01, comparison_mode,
+ insn = emit_conditional_move (temp,
+ { comparison_code, op00, op01,
+ comparison_mode },
op1, op2, mode,
unsignedp);
@@ -9732,8 +9733,9 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
start_sequence ();
/* Try to emit the conditional move. */
- insn = emit_conditional_move (target, comparison_code,
- op0, cmpop1, mode,
+ insn = emit_conditional_move (target,
+ { comparison_code,
+ op0, cmpop1, mode },
op0, op1, mode,
unsignedp);
@@ -772,7 +772,7 @@ static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
- rtx, rtx, rtx);
+ rtx, rtx, rtx, rtx = NULL, rtx = NULL);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
@@ -1711,7 +1711,8 @@ noce_try_store_flag_mask (struct noce_if_info *if_info)
static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
- rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
+ rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue, rtx cc_cmp,
+ rtx rev_cc_cmp)
{
rtx target ATTRIBUTE_UNUSED;
int unsignedp ATTRIBUTE_UNUSED;
@@ -1743,23 +1744,30 @@ noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
end_sequence ();
}
- /* Don't even try if the comparison operands are weird
- except that the target supports cbranchcc4. */
- if (! general_operand (cmp_a, GET_MODE (cmp_a))
- || ! general_operand (cmp_b, GET_MODE (cmp_b)))
- {
- if (!have_cbranchcc4
- || GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
- || cmp_b != const0_rtx)
- return NULL_RTX;
- }
-
unsignedp = (code == LTU || code == GEU
|| code == LEU || code == GTU);
- target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
- vtrue, vfalse, GET_MODE (x),
- unsignedp);
+ if (cc_cmp != NULL_RTX && rev_cc_cmp != NULL_RTX)
+ target = emit_conditional_move (x, cc_cmp, rev_cc_cmp,
+ vtrue, vfalse, GET_MODE (x));
+ else
+ {
+ /* Don't even try if the comparison operands are weird
+ except that the target supports cbranchcc4. */
+ if (! general_operand (cmp_a, GET_MODE (cmp_a))
+ || ! general_operand (cmp_b, GET_MODE (cmp_b)))
+ {
+ if (!have_cbranchcc4
+ || GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
+ || cmp_b != const0_rtx)
+ return NULL_RTX;
+ }
+
+ target = emit_conditional_move (x, { code, cmp_a, cmp_b, VOIDmode },
+ vtrue, vfalse, GET_MODE (x),
+ unsignedp);
+ }
+
if (target)
return target;
@@ -1795,8 +1803,9 @@ noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));
- target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
- VOIDmode, reg_vtrue, reg_vfalse,
+ target = emit_conditional_move (promoted_target,
+ { code, cmp_a, cmp_b, VOIDmode },
+ reg_vtrue, reg_vfalse,
GET_MODE (reg_vtrue), unsignedp);
/* Nope, couldn't do it in that mode either. */
if (!target)
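For reference, a minimal sketch (not part of the patch) of how the extended noce_emit_cmove interface can be used once a comparison rtx and its reverse are already at hand; cond and rev_cond are placeholder names, and existing callers are unaffected because the two new parameters default to NULL:

  /* Illustrative only: cond is assumed to be a valid comparison rtx
     (e.g. against a CC register) and rev_cond its reverse; passing
     both lets noce_emit_cmove avoid emitting extra comparison insns.  */
  rtx t = noce_emit_cmove (if_info, x, GET_CODE (cond),
                           XEXP (cond, 0), XEXP (cond, 1),
                           vfalse, vtrue, cond, rev_cond);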
@@ -52,6 +52,8 @@ static void prepare_float_lib_cmp (rtx, rtx, enum rtx_code, rtx *,
static rtx expand_unop_direct (machine_mode, optab, rtx, rtx, int);
static void emit_libcall_block_1 (rtx_insn *, rtx, rtx, rtx, bool);
+static rtx emit_conditional_move_1 (rtx, rtx, rtx, rtx, machine_mode);
+
/* Debug facility for use in GDB. */
void debug_optab_libfuncs (void);
@@ -624,12 +626,13 @@ expand_doubleword_shift_condmove (scalar_int_mode op1_mode, optab binoptab,
/* Select between them. Do the INTO half first because INTO_SUPERWORD
might be the current value of OUTOF_TARGET. */
- if (!emit_conditional_move (into_target, cmp_code, cmp1, cmp2, op1_mode,
+ if (!emit_conditional_move (into_target, { cmp_code, cmp1, cmp2, op1_mode },
into_target, into_superword, word_mode, false))
return false;
if (outof_target != 0)
- if (!emit_conditional_move (outof_target, cmp_code, cmp1, cmp2, op1_mode,
+ if (!emit_conditional_move (outof_target,
+ { cmp_code, cmp1, cmp2, op1_mode },
outof_target, outof_superword,
word_mode, false))
return false;
@@ -4843,8 +4846,8 @@ emit_indirect_jump (rtx loc)
is not supported. */
rtx
-emit_conditional_move (rtx target, enum rtx_code code, rtx op0, rtx op1,
- machine_mode cmode, rtx op2, rtx op3,
+emit_conditional_move (rtx target, struct rtx_comparison comp,
+ rtx op2, rtx op3,
machine_mode mode, int unsignedp)
{
rtx comparison;
@@ -4866,31 +4869,33 @@ emit_conditional_move (rtx target, enum rtx_code code, rtx op0, rtx op1,
/* If one operand is constant, make it the second one. Only do this
if the other operand is not constant as well. */
- if (swap_commutative_operands_p (op0, op1))
+ if (swap_commutative_operands_p (comp.op0, comp.op1))
{
- std::swap (op0, op1);
- code = swap_condition (code);
+ std::swap (comp.op0, comp.op1);
+ comp.code = swap_condition (comp.code);
}
/* get_condition will prefer to generate LT and GT even if the old
comparison was against zero, so undo that canonicalization here since
comparisons against zero are cheaper. */
- if (code == LT && op1 == const1_rtx)
- code = LE, op1 = const0_rtx;
- else if (code == GT && op1 == constm1_rtx)
- code = GE, op1 = const0_rtx;
- if (cmode == VOIDmode)
- cmode = GET_MODE (op0);
+ if (comp.code == LT && comp.op1 == const1_rtx)
+ comp.code = LE, comp.op1 = const0_rtx;
+ else if (comp.code == GT && comp.op1 == constm1_rtx)
+ comp.code = GE, comp.op1 = const0_rtx;
+
+ if (comp.mode == VOIDmode)
+ comp.mode = GET_MODE (comp.op0);
- enum rtx_code orig_code = code;
+ enum rtx_code orig_code = comp.code;
bool swapped = false;
if (swap_commutative_operands_p (op2, op3)
- && ((reversed = reversed_comparison_code_parts (code, op0, op1, NULL))
+ && ((reversed =
+ reversed_comparison_code_parts (comp.code, comp.op0, comp.op1, NULL))
!= UNKNOWN))
{
std::swap (op2, op3);
- code = reversed;
+ comp.code = reversed;
swapped = true;
}
@@ -4907,8 +4912,10 @@ emit_conditional_move (rtx target, enum rtx_code code, rtx op0, rtx op1,
for (int pass = 0; ; pass++)
{
- code = unsignedp ? unsigned_condition (code) : code;
- comparison = simplify_gen_relational (code, VOIDmode, cmode, op0, op1);
+ comp.code = unsignedp ? unsigned_condition (comp.code) : comp.code;
+ comparison =
+ simplify_gen_relational (comp.code, VOIDmode,
+ comp.mode, comp.op0, comp.op1);
/* We can get const0_rtx or const_true_rtx in some circumstances. Just
punt and let the caller figure out how best to deal with this
@@ -4919,24 +4926,16 @@ emit_conditional_move (rtx target, enum rtx_code code, rtx op0, rtx op1,
save_pending_stack_adjust (&save);
last = get_last_insn ();
do_pending_stack_adjust ();
- machine_mode cmpmode = cmode;
+ machine_mode cmpmode = comp.mode;
prepare_cmp_insn (XEXP (comparison, 0), XEXP (comparison, 1),
GET_CODE (comparison), NULL_RTX, unsignedp,
OPTAB_WIDEN, &comparison, &cmpmode);
if (comparison)
{
- class expand_operand ops[4];
-
- create_output_operand (&ops[0], target, mode);
- create_fixed_operand (&ops[1], comparison);
- create_input_operand (&ops[2], op2, mode);
- create_input_operand (&ops[3], op3, mode);
- if (maybe_expand_insn (icode, 4, ops))
- {
- if (ops[0].value != target)
- convert_move (target, ops[0].value, false);
- return target;
- }
+ rtx res = emit_conditional_move_1 (target, comparison,
+ op2, op3, mode);
+ if (res != NULL_RTX)
+ return res;
}
delete_insns_since (last);
restore_pending_stack_adjust (&save);
@@ -4948,17 +4947,85 @@ emit_conditional_move (rtx target, enum rtx_code code, rtx op0, rtx op1,
/* If the preferred op2/op3 order is not usable, retry with other
operand order, perhaps it will expand successfully. */
if (swapped)
- code = orig_code;
- else if ((reversed = reversed_comparison_code_parts (orig_code, op0, op1,
+ comp.code = orig_code;
+ else if ((reversed =
+ reversed_comparison_code_parts (orig_code, comp.op0, comp.op1,
NULL))
!= UNKNOWN)
- code = reversed;
+ comp.code = reversed;
else
return NULL_RTX;
std::swap (op2, op3);
}
}
+/* Overload that emits a conditional move of OP2/OP3 to TARGET, trying
+ COMPARISON first and then the reversed REV_COMPARISON with OP2 and
+ OP3 swapped.  Unlike the constituent-based overload above, no extra
+ insns are emitted for the comparison itself, though the final
+ conditional move may still need more than one insn.  */
+
+rtx
+emit_conditional_move (rtx target, rtx comparison, rtx rev_comparison,
+ rtx op2, rtx op3, machine_mode mode)
+{
+ rtx res = emit_conditional_move_1 (target, comparison, op2, op3, mode);
+
+ if (res != NULL_RTX)
+ return res;
+
+ return emit_conditional_move_1 (target, rev_comparison, op3, op2, mode);
+}
+
+/* Helper for emitting a conditional move. */
+
+static rtx
+emit_conditional_move_1 (rtx target, rtx comparison,
+ rtx op2, rtx op3, machine_mode mode)
+{
+ enum insn_code icode;
+
+ if (comparison == NULL_RTX || !COMPARISON_P (comparison))
+ return NULL_RTX;
+
+ /* If the two source operands are identical, that's just a move. */
+ if (rtx_equal_p (op2, op3))
+ {
+ if (!target)
+ target = gen_reg_rtx (mode);
+
+ emit_move_insn (target, op3);
+ return target;
+ }
+
+ if (mode == VOIDmode)
+ mode = GET_MODE (op2);
+
+ icode = direct_optab_handler (movcc_optab, mode);
+
+ if (icode == CODE_FOR_nothing)
+ return NULL_RTX;
+
+ if (!target)
+ target = gen_reg_rtx (mode);
+
+ class expand_operand ops[4];
+
+ create_output_operand (&ops[0], target, mode);
+ create_fixed_operand (&ops[1], comparison);
+ create_input_operand (&ops[2], op2, mode);
+ create_input_operand (&ops[3], op3, mode);
+
+ if (maybe_expand_insn (icode, 4, ops))
+ {
+ if (ops[0].value != target)
+ convert_move (target, ops[0].value, false);
+ return target;
+ }
+
+ return NULL_RTX;
+}
+
/* Emit a conditional negate or bitwise complement using the
negcc or notcc optabs if available. Return NULL_RTX if such operations
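For illustration (again not part of the patch), a caller that already holds both a pre-built comparison and its reverse can use the new two-comparison overload directly; cc_cmp and rev_cc_cmp are placeholder names here:

  /* Sketch: cc_cmp might be an LT comparison of a CC register against
     const0_rtx and rev_cc_cmp the corresponding GE comparison.  Both
     operand orders are tried; no insns are emitted for the comparison
     itself.  */
  rtx res = emit_conditional_move (target, cc_cmp, rev_cc_cmp,
                                   vtrue, vfalse, GET_MODE (target));
  if (res == NULL_RTX)
    return NULL_RTX;  /* Neither ordering matched a movcc pattern.  */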
@@ -279,8 +279,8 @@ extern void emit_indirect_jump (rtx);
#endif
/* Emit a conditional move operation. */
-rtx emit_conditional_move (rtx, enum rtx_code, rtx, rtx, machine_mode,
- rtx, rtx, machine_mode, int);
+rtx emit_conditional_move (rtx, rtx_comparison, rtx, rtx, machine_mode, int);
+rtx emit_conditional_move (rtx, rtx, rtx, rtx, rtx, machine_mode);
/* Emit a conditional negate or bitwise complement operation. */
rtx emit_conditional_neg_or_complement (rtx, rtx_code, machine_mode, rtx,
@@ -4589,7 +4589,16 @@ word_register_operation_p (const_rtx x)
return true;
}
}
-
+
+/* Holds an rtx comparison to simplify passing many parameters pertaining to a
+ single comparison. */
+
+struct rtx_comparison {
+ rtx_code code;
+ rtx op0, op1;
+ machine_mode mode;
+};
+
/* gtype-desc.c. */
extern void gt_ggc_mx (rtx &);
extern void gt_pch_nx (rtx &);
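
Because rtx_comparison is a plain aggregate, call sites can brace-initialize it inline, which is the pattern used in the expand/optabs hunks above; a minimal sketch with illustrative operand names:

  /* Equivalent to the old (target, code, op0, op1, cmode, op2, op3,
     mode, unsignedp) form; the four comparison parameters now travel
     together as one aggregate.  */
  struct rtx_comparison cmp = { GE, op0, const0_rtx, mode };
  rtx r = emit_conditional_move (target, cmp, op2, op3, mode, 0);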