Commit cb4347e8 authored by Trevor Saunders, committed by Trevor Saunders

make targetm.gen_ccmp{first,next} take rtx_insn **

gcc/ChangeLog:

2016-11-03  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* ccmp.c (expand_ccmp_expr_1): Adjust.
	(expand_ccmp_expr): Likewise.
	(expand_ccmp_next): Likewise.
	* config/aarch64/aarch64.c (aarch64_gen_ccmp_next): Likewise.
	(aarch64_gen_ccmp_first): Likewise.
	* doc/tm.texi: Regenerate.
	* target.def (gen_ccmp_first): Change argument types to rtx_insn *.
	(gen_ccmp_next): Likewise.

From-SVN: r241811
parent c12176d7
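For orientation, the net effect on the target vector is the following signature change (a sketch written out from the target.def hunk below, not quoted verbatim from a GCC header):

  /* Before this commit the hooks traded in bare rtx sequences, forcing
     callers to cast before using them as instruction lists.  */
  rtx (*gen_ccmp_first) (rtx *prep_seq, rtx *gen_seq,
			 int code, tree op0, tree op1);
  rtx (*gen_ccmp_next) (rtx *prep_seq, rtx *gen_seq, rtx prev,
			int cmp_code, tree op0, tree op1, int bit_code);

  /* After it the sequences are typed as instruction lists, so callers and
     implementations no longer need safe_as_a <rtx_insn *> casts.  */
  rtx (*gen_ccmp_first) (rtx_insn **prep_seq, rtx_insn **gen_seq,
			 int code, tree op0, tree op1);
  rtx (*gen_ccmp_next) (rtx_insn **prep_seq, rtx_insn **gen_seq, rtx prev,
			int cmp_code, tree op0, tree op1, int bit_code);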
gcc/ChangeLog
+2016-11-03  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
+
+	* ccmp.c (expand_ccmp_expr_1): Adjust.
+	(expand_ccmp_expr): Likewise.
+	(expand_ccmp_next): Likewise.
+	* config/aarch64/aarch64.c (aarch64_gen_ccmp_next): Likewise.
+	(aarch64_gen_ccmp_first): Likewise.
+	* doc/tm.texi: Regenerate.
+	* target.def (gen_ccmp_first): Change argument types to rtx_insn *.
+	(gen_ccmp_next): Likewise.
+
 2016-11-03  Bin Cheng  <bin.cheng@arm.com>
 
 	* tree-vect-loop.c (destroy_loop_vec_info): Handle cond_expr.
gcc/ccmp.c
@@ -122,7 +122,7 @@ ccmp_candidate_p (gimple *g)
    GEN_SEQ returns all compare insns.  */
 static rtx
 expand_ccmp_next (gimple *g, tree_code code, rtx prev,
-		  rtx *prep_seq, rtx *gen_seq)
+		  rtx_insn **prep_seq, rtx_insn **gen_seq)
 {
   rtx_code rcode;
   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (g)));
@@ -149,10 +149,8 @@ expand_ccmp_next (gimple *g, tree_code code, rtx prev,
    PREP_SEQ returns all insns to prepare opearand.
    GEN_SEQ returns all compare insns.  */
 static rtx
-expand_ccmp_expr_1 (gimple *g, rtx *prep_seq, rtx *gen_seq)
+expand_ccmp_expr_1 (gimple *g, rtx_insn **prep_seq, rtx_insn **gen_seq)
 {
-  rtx prep_seq_1, gen_seq_1;
-  rtx prep_seq_2, gen_seq_2;
   tree exp = gimple_assign_rhs_to_tree (g);
   tree_code code = TREE_CODE (exp);
   gimple *gs0 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 0));
@@ -180,6 +178,7 @@ expand_ccmp_expr_1 (gimple *g, rtx *prep_seq, rtx *gen_seq)
   rcode0 = get_rtx_code (code0, unsignedp0);
   rcode1 = get_rtx_code (code1, unsignedp1);
 
+  rtx_insn *prep_seq_1, *gen_seq_1;
   tmp = targetm.gen_ccmp_first (&prep_seq_1, &gen_seq_1, rcode0,
				gimple_assign_rhs1 (gs0),
				gimple_assign_rhs2 (gs0));
@@ -187,14 +186,15 @@ expand_ccmp_expr_1 (gimple *g, rtx *prep_seq, rtx *gen_seq)
   if (tmp != NULL)
     {
       ret = expand_ccmp_next (gs1, code, tmp, &prep_seq_1, &gen_seq_1);
-      cost1 = seq_cost (safe_as_a <rtx_insn *> (prep_seq_1), speed_p);
-      cost1 += seq_cost (safe_as_a <rtx_insn *> (gen_seq_1), speed_p);
+      cost1 = seq_cost (prep_seq_1, speed_p);
+      cost1 += seq_cost (gen_seq_1, speed_p);
     }
 
   /* FIXME: Temporary workaround for PR69619.
      Avoid exponential compile time due to expanding gs0 and gs1 twice.
      If gs0 and gs1 are complex, the cost will be high, so avoid
      reevaluation if above an arbitrary threshold.  */
+  rtx_insn *prep_seq_2, *gen_seq_2;
   if (tmp == NULL || cost1 < COSTS_N_INSNS (25))
     tmp2 = targetm.gen_ccmp_first (&prep_seq_2, &gen_seq_2, rcode1,
				   gimple_assign_rhs1 (gs1),
@@ -207,8 +207,8 @@ expand_ccmp_expr_1 (gimple *g, rtx *prep_seq, rtx *gen_seq)
     {
       ret2 = expand_ccmp_next (gs0, code, tmp2, &prep_seq_2,
			       &gen_seq_2);
-      cost2 = seq_cost (safe_as_a <rtx_insn *> (prep_seq_2), speed_p);
-      cost2 += seq_cost (safe_as_a <rtx_insn *> (gen_seq_2), speed_p);
+      cost2 = seq_cost (prep_seq_2, speed_p);
+      cost2 += seq_cost (gen_seq_2, speed_p);
     }
 
   if (cost2 < cost1)
@@ -262,14 +262,13 @@ expand_ccmp_expr (gimple *g)
 {
   rtx_insn *last;
   rtx tmp;
-  rtx prep_seq, gen_seq;
-
-  prep_seq = gen_seq = NULL_RTX;
 
   if (!ccmp_candidate_p (g))
     return NULL_RTX;
 
   last = get_last_insn ();
+
+  rtx_insn *prep_seq = NULL, *gen_seq = NULL;
   tmp = expand_ccmp_expr_1 (g, &prep_seq, &gen_seq);
 
   if (tmp)
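Condensed into a short caller-side sketch, the practical effect in ccmp.c looks like this (the names are the ones used in the hunks above; the surrounding control flow is abbreviated, so this is illustrative rather than a verbatim excerpt):

  rtx_insn *prep_seq_1 = NULL, *gen_seq_1 = NULL;
  rtx tmp = targetm.gen_ccmp_first (&prep_seq_1, &gen_seq_1, rcode0,
				    gimple_assign_rhs1 (gs0),
				    gimple_assign_rhs2 (gs0));
  if (tmp != NULL)
    {
      /* The sequences are already rtx_insn lists, so seq_cost consumes
	 them directly; the old safe_as_a <rtx_insn *> casts go away.  */
      unsigned cost1 = seq_cost (prep_seq_1, speed_p);
      cost1 += seq_cost (gen_seq_1, speed_p);
    }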
gcc/config/aarch64/aarch64.c
@@ -13234,7 +13234,7 @@ aarch64_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
 }
 
 static rtx
-aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq,
+aarch64_gen_ccmp_first (rtx_insn **prep_seq, rtx_insn **gen_seq,
			int code, tree treeop0, tree treeop1)
 {
   machine_mode op_mode, cmp_mode, cc_mode = CCmode;
@@ -13308,8 +13308,8 @@ aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq,
 }
 
 static rtx
-aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code,
-		       tree treeop0, tree treeop1, int bit_code)
+aarch64_gen_ccmp_next (rtx_insn **prep_seq, rtx_insn **gen_seq, rtx prev,
+		       int cmp_code, tree treeop0, tree treeop1, int bit_code)
 {
   rtx op0, op1, target;
   machine_mode op_mode, cmp_mode, cc_mode = CCmode;
@@ -13318,7 +13318,7 @@ aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code,
   struct expand_operand ops[6];
   int aarch64_cond;
 
-  push_to_sequence ((rtx_insn*) *prep_seq);
+  push_to_sequence (*prep_seq);
   expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
 
   op_mode = GET_MODE (op0);
@@ -13384,7 +13384,7 @@ aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code,
   create_fixed_operand (&ops[4], prev);
   create_fixed_operand (&ops[5], GEN_INT (aarch64_cond));
 
-  push_to_sequence ((rtx_insn*) *gen_seq);
+  push_to_sequence (*gen_seq);
   if (!maybe_expand_insn (icode, 6, ops))
     {
       end_sequence ();
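On the implementation side the sequence bookkeeping now composes without casts. A minimal sketch of the pattern a backend hook typically follows (the get_insns ()/end_sequence () pairing is the standard GCC sequence idiom and is assumed here, not quoted from the visible hunks):

  /* Continue collecting preparation insns on the sequence handed in by
     the middle end; *prep_seq is now an rtx_insn *, so no cast is needed.  */
  push_to_sequence (*prep_seq);
  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
  *prep_seq = get_insns ();	/* get_insns () already returns rtx_insn *.  */
  end_sequence ();

  /* Likewise for the compare insns themselves.  */
  push_to_sequence (*gen_seq);
  if (!maybe_expand_insn (icode, 6, ops))
    {
      end_sequence ();
      return NULL_RTX;
    }
  *gen_seq = get_insns ();
  end_sequence ();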
gcc/doc/tm.texi
@@ -11550,7 +11550,7 @@ This target hook is required only when the target has several different
 modes and they have different conditional execution capability, such as ARM.
 @end deftypefn
 
-@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_FIRST (rtx *@var{prep_seq}, rtx *@var{gen_seq}, int @var{code}, tree @var{op0}, tree @var{op1})
+@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_FIRST (rtx_insn **@var{prep_seq}, rtx_insn **@var{gen_seq}, int @var{code}, tree @var{op0}, tree @var{op1})
 This function prepares to emit a comparison insn for the first compare in a
 sequence of conditional comparisions.  It returns an appropriate comparison
 with @code{CC} for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.
@@ -11560,7 +11560,7 @@ This function prepares to emit a comparison insn for the first compare in a
 @var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.
 @end deftypefn
 
-@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_NEXT (rtx *@var{prep_seq}, rtx *@var{gen_seq}, rtx @var{prev}, int @var{cmp_code}, tree @var{op0}, tree @var{op1}, int @var{bit_code})
+@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_NEXT (rtx_insn **@var{prep_seq}, rtx_insn **@var{gen_seq}, rtx @var{prev}, int @var{cmp_code}, tree @var{op0}, tree @var{op1}, int @var{bit_code})
 This function prepares to emit a conditional comparison within a sequence
 of conditional comparisons.  It returns an appropriate comparison with
 @code{CC} for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.
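A backend opts into these hooks by pointing the target macros at its own implementations; the aarch64 port, whose functions are adjusted above, does so along these lines (a sketch of the usual registration boilerplate, not part of this diff):

  /* In the target's .c file, alongside the other TARGET_* overrides.  */
  #undef TARGET_GEN_CCMP_FIRST
  #define TARGET_GEN_CCMP_FIRST aarch64_gen_ccmp_first

  #undef TARGET_GEN_CCMP_NEXT
  #define TARGET_GEN_CCMP_NEXT aarch64_gen_ccmp_next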
gcc/target.def
@@ -2627,7 +2627,7 @@ DEFHOOK
 insns are saved in @var{gen_seq}.  They will be emitted when all the\n\
 compares in the the conditional comparision are generated without error.\n\
 @var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.",
- rtx, (rtx *prep_seq, rtx *gen_seq, int code, tree op0, tree op1),
+ rtx, (rtx_insn **prep_seq, rtx_insn **gen_seq, int code, tree op0, tree op1),
 NULL)
 
 DEFHOOK
@@ -2644,7 +2644,7 @@ DEFHOOK
 be appropriate for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.\n\
 @var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.\n\
 @var{bit_code} is @code{AND} or @code{IOR}, which is the op on the compares.",
- rtx, (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code, tree op0, tree op1, int bit_code),
+ rtx, (rtx_insn **prep_seq, rtx_insn **gen_seq, rtx prev, int cmp_code, tree op0, tree op1, int bit_code),
 NULL)
 
 /* Return a new value for loop unroll size.  */
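Both hooks default to NULL (the "NULL)" defaults above), so generic code has to check for an implementation before attempting conditional-compare expansion. The middle-end call site is not part of this diff; a guard of roughly this shape is what gates the expand_ccmp_expr entry point seen in ccmp.c above (hypothetical placement):

  /* Only targets providing both hooks get conditional-compare expansion.  */
  if (targetm.gen_ccmp_first && targetm.gen_ccmp_next)
    {
      rtx cc = expand_ccmp_expr (g);
      if (cc)
	return cc;	/* Expansion succeeded; result lives in CC.  */
    }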