Commit fa626987 authored by Georg-Johann Lay, committed by Georg-Johann Lay

Some patterns for moving one bit around by BST + BLD.
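
For illustration only (not part of the commit): BST stores a register bit in the
T flag and BLD copies T into a destination bit, so a single-bit move needs only
two instructions.  A minimal C sketch, with a hypothetical function name and
register numbers in the comment:

    unsigned char
    move_bit (unsigned char x, unsigned char y)
    {
      /* Moves bit 3 of y into bit 6 of x; with the new patterns the AVR
         backend can emit something like
             bst r22,3    ; T <- y.3
             bld r24,6    ; x.6 <- T
         instead of an open-coded shift/mask sequence.  */
      return (unsigned char) ((x & ~(1u << 6)) | (((y >> 3) & 1u) << 6));
    }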

	* config/avr/avr.md (any_extract, any_shiftrt): New code iterators.
	(*insv.extract, *insv.shiftrt, *insv.not-bit.0, *insv.not-bit.7)
	(*insv.xor-extract, *insv.xor1-bit.0): New insns.
	(adjust_len) [insv_notbit, insv_notbit_0, insv_notbit_7]: New
	values for insn attribute.
	* config/avr/avr.c (avr_out_insert_notbit): New function.
	(avr_adjust_insn_length): Handle ADJUST_LEN_INSV_NOTBIT,
	ADJUST_LEN_INSV_NOTBIT_0/_7.
	* config/avr/avr-protos.h (avr_out_insert_notbit): New proto.

From-SVN: r238587
parent b24d9420
2016-07-21  Georg-Johann Lay  <avr@gjlay.de>

	* config/avr/avr.md (any_extract, any_shiftrt): New code iterators.
	(*insv.extract, *insv.shiftrt, *insv.not-bit.0, *insv.not-bit.7)
	(*insv.xor-extract, *insv.xor1-bit.0): New insns.
	(adjust_len) [insv_notbit, insv_notbit_0, insv_notbit_7]: New
	values for insn attribute.
	* config/avr/avr.c (avr_out_insert_notbit): New function.
	(avr_adjust_insn_length): Handle ADJUST_LEN_INSV_NOTBIT,
	ADJUST_LEN_INSV_NOTBIT_0/_7.
	* config/avr/avr-protos.h (avr_out_insert_notbit): New proto.

2016-07-21  Bin Cheng  <bin.cheng@arm.com>

	* tree-chrec.c (convert_affine_scev): New parameter.  Pass new arg.
......
@@ -58,6 +58,7 @@ extern const char *avr_out_compare64 (rtx_insn *, rtx*, int*);
extern const char *ret_cond_branch (rtx x, int len, int reverse);
extern const char *avr_out_movpsi (rtx_insn *, rtx*, int*);
extern const char *avr_out_sign_extend (rtx_insn *, rtx*, int*);
extern const char *avr_out_insert_notbit (rtx_insn *, rtx*, rtx, int*);
extern const char *ashlqi3_out (rtx_insn *insn, rtx operands[], int *len);
extern const char *ashlhi3_out (rtx_insn *insn, rtx operands[], int *len);
......
@@ -7973,6 +7973,76 @@ avr_out_addto_sp (rtx *op, int *plen)
}

/* Output instructions to insert an inverted bit into OPERANDS[0]:
   $0.$1 = ~$2.$3      if XBITNO = NULL
   $0.$1 = ~$2.XBITNO  if XBITNO != NULL.
   If PLEN = NULL then output the respective instruction sequence which
   is a combination of BST / BLD and some instruction(s) to invert the bit.
   If PLEN != NULL then store the length of the sequence (in words) in *PLEN.
   Return "".  */

const char*
avr_out_insert_notbit (rtx_insn *insn, rtx operands[], rtx xbitno, int *plen)
{
  rtx op[4] = { operands[0], operands[1], operands[2],
                xbitno == NULL_RTX ? operands [3] : xbitno };

  if (INTVAL (op[1]) == 7
      && test_hard_reg_class (LD_REGS, op[0]))
    {
      /* If the inserted bit number is 7 and we have a d-reg, then invert
         the bit after the insertion by means of SUBI *,0x80.  */

      if (INTVAL (op[3]) == 7
          && REGNO (op[0]) == REGNO (op[2]))
        {
          avr_asm_len ("subi %0,0x80", op, plen, -1);
        }
      else
        {
          avr_asm_len ("bst %2,%3" CR_TAB
                       "bld %0,%1" CR_TAB
                       "subi %0,0x80", op, plen, -3);
        }
    }
  else if (test_hard_reg_class (LD_REGS, op[0])
           && (INTVAL (op[1]) != INTVAL (op[3])
               || !reg_overlap_mentioned_p (op[0], op[2])))
    {
      /* If the destination bit is in a d-reg we can jump depending
         on the source bit and use ANDI / ORI.  This just applies if we
         have not an early-clobber situation with the bit.  */

      avr_asm_len ("andi %0,~(1<<%1)" CR_TAB
                   "sbrs %2,%3"       CR_TAB
                   "ori %0,1<<%1", op, plen, -3);
    }
  else
    {
      /* Otherwise, invert the bit by means of COM before we store it with
         BST and then undo the COM if needed.  */

      avr_asm_len ("com %2" CR_TAB
                   "bst %2,%3", op, plen, -2);

      if (!reg_unused_after (insn, op[2])
          // A simple 'reg_unused_after' is not enough because that function
          // assumes that the destination register is overwritten completely
          // and hence is in order for our purpose.  This is not the case
          // with BLD which just changes one bit of the destination.
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          /* Undo the COM from above.  */

          avr_asm_len ("com %2", op, plen, 1);
        }

      avr_asm_len ("bld %0,%1", op, plen, 1);
    }

  return "";
}
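
A minimal sketch of the generic fallback branch above (COM / BST / COM / BLD),
with hypothetical operand choices, namely inserting ~src.3 into dst.5 where dst
is not a d-reg; editorial illustration, not code from the commit:

    unsigned char
    insert_notbit_sketch (unsigned char dst, unsigned char src)
    {
      /* The emitted sequence is roughly
             com  Rsrc      ; complement src so that its bit 3 is inverted
             bst  Rsrc,3    ; T <- ~src.3
             com  Rsrc      ; undo the COM if src is still live or overlaps dst
             bld  Rdst,5    ; dst.5 <- T
         A d-reg destination instead takes the ANDI/SBRS/ORI branch, or the
         SUBI 0x80 shortcut when the destination bit is 7.  The C below
         computes the same value.  */
      unsigned char bit = (unsigned char) (((src >> 3) & 1u) ^ 1u);
      return (unsigned char) ((dst & ~(1u << 5)) | (bit << 5));
    }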

/* Outputs instructions needed for fixed point type conversion.
   This includes converting between any fixed point type, as well
   as converting to any integer type.  Conversion between integer
@@ -8810,6 +8880,16 @@ avr_adjust_insn_length (rtx_insn *insn, int len)
    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
    case ADJUST_LEN_INSV_NOTBIT:
      avr_out_insert_notbit (insn, op, NULL_RTX, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_0:
      avr_out_insert_notbit (insn, op, const0_rtx, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_7:
      avr_out_insert_notbit (insn, op, GEN_INT (7), &len);
      break;
    default:
      gcc_unreachable();
    }
......
@@ -156,7 +156,7 @@
   ashlhi, ashrhi, lshrhi,
   ashlsi, ashrsi, lshrsi,
   ashlpsi, ashrpsi, lshrpsi,
   insert_bits, insv_notbit, insv_notbit_0, insv_notbit_7,
   no"
  (const_string "no"))
@@ -264,6 +264,8 @@
;; Define two incarnations so that we can build the cross product.
(define_code_iterator any_extend  [sign_extend zero_extend])
(define_code_iterator any_extend2 [sign_extend zero_extend])
(define_code_iterator any_extract [sign_extract zero_extract])
(define_code_iterator any_shiftrt [lshiftrt ashiftrt])
(define_code_iterator xior [xor ior])
(define_code_iterator eqne [eq ne])
@@ -6485,6 +6487,11 @@
              (match_operand:QI 3 "nonmemory_operand" ""))]
  "optimize")
;; Some more patterns to support moving around one bit which can be accomplished
;; by BST + BLD in most situations. Unfortunately, there is no canonical
;; representation, and we just implement some more cases that are not too
;; complicated.
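
For illustration (hypothetical functions, not part of the commit): both
functions below move bit 4 of y into bit 1 of x, yet depending on how the
source bit is written, combine may present it as a zero_extract, a right
shift, or a masked test, which is why several equivalent patterns are needed:

    unsigned char
    move_bit_via_shift (unsigned char x, unsigned char y)
    {
      return (unsigned char) ((x & ~(1u << 1)) | (((y >> 4) & 1u) << 1));
    }

    unsigned char
    move_bit_via_test (unsigned char x, unsigned char y)
    {
      return (unsigned char) ((x & ~(1u << 1)) | ((y & (1u << 4)) ? (1u << 1) : 0u));
    }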

;; Insert bit $2.0 into $0.$1
(define_insn "*insv.reg"
  [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r,d,d,l,l")
@@ -6501,6 +6508,103 @@
  [(set_attr "length" "2,1,1,2,2")
   (set_attr "cc" "none,set_zn,set_zn,none,none")])
;; Insert bit $2.$3 into $0.$1
(define_insn "*insv.extract"
  [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
                         (const_int 1)
                         (match_operand:QI 1 "const_0_to_7_operand" "n"))
        (any_extract:QI (match_operand:QI 2 "register_operand" "r")
                        (const_int 1)
                        (match_operand:QI 3 "const_0_to_7_operand" "n")))]
  ""
  "bst %2,%3\;bld %0,%1"
  [(set_attr "length" "2")
   (set_attr "cc" "none")])
;; Insert bit $2.$3 into $0.$1
(define_insn "*insv.shiftrt"
  [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
                         (const_int 1)
                         (match_operand:QI 1 "const_0_to_7_operand" "n"))
        (any_shiftrt:QI (match_operand:QI 2 "register_operand" "r")
                        (match_operand:QI 3 "const_0_to_7_operand" "n")))]
  ""
  "bst %2,%3\;bld %0,%1"
  [(set_attr "length" "2")
   (set_attr "cc" "none")])
;; Same, but with a NOT inverting the source bit.
;; Insert bit ~$2.$3 into $0.$1
(define_insn "*insv.not-shiftrt"
  [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
                         (const_int 1)
                         (match_operand:QI 1 "const_0_to_7_operand" "n"))
        (not:QI (any_shiftrt:QI (match_operand:QI 2 "register_operand" "r")
                                (match_operand:QI 3 "const_0_to_7_operand" "n"))))]
  ""
  {
    return avr_out_insert_notbit (insn, operands, NULL_RTX, NULL);
  }
  [(set_attr "adjust_len" "insv_notbit")
   (set_attr "cc" "clobber")])
;; Insert bit ~$2.0 into $0.$1
(define_insn "*insv.xor1-bit.0"
  [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
                         (const_int 1)
                         (match_operand:QI 1 "const_0_to_7_operand" "n"))
        (xor:QI (match_operand:QI 2 "register_operand" "r")
                (const_int 1)))]
  ""
  {
    return avr_out_insert_notbit (insn, operands, const0_rtx, NULL);
  }
  [(set_attr "adjust_len" "insv_notbit_0")
   (set_attr "cc" "clobber")])
;; Insert bit ~$2.0 into $0.$1
(define_insn "*insv.not-bit.0"
  [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
                         (const_int 1)
                         (match_operand:QI 1 "const_0_to_7_operand" "n"))
        (not:QI (match_operand:QI 2 "register_operand" "r")))]
  ""
  {
    return avr_out_insert_notbit (insn, operands, const0_rtx, NULL);
  }
  [(set_attr "adjust_len" "insv_notbit_0")
   (set_attr "cc" "clobber")])
;; Insert bit ~$2.7 into $0.$1
(define_insn "*insv.not-bit.7"
  [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
                         (const_int 1)
                         (match_operand:QI 1 "const_0_to_7_operand" "n"))
        (ge:QI (match_operand:QI 2 "register_operand" "r")
               (const_int 0)))]
  ""
  {
    return avr_out_insert_notbit (insn, operands, GEN_INT (7), NULL);
  }
  [(set_attr "adjust_len" "insv_notbit_7")
   (set_attr "cc" "clobber")])
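
An aside on *insv.not-bit.7 above: for an 8-bit value, (v >= 0) is exactly the
complement of bit 7, the sign bit, which is why a (ge:QI ... (const_int 0))
source is handled as "insert ~$2.7".  A minimal C sketch (hypothetical function
name; destination bit 2 chosen arbitrarily):

    unsigned char
    insert_not_sign_bit (unsigned char dst, signed char v)
    {
      unsigned char notbit7 = (unsigned char) (v >= 0);   /* == ~v.7 */
      return (unsigned char) ((dst & ~(1u << 2)) | (notbit7 << 2));
    }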
;; Insert bit ~$2.$3 into $0.$1
(define_insn "*insv.xor-extract"
  [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
                         (const_int 1)
                         (match_operand:QI 1 "const_0_to_7_operand" "n"))
        (any_extract:QI (xor:QI (match_operand:QI 2 "register_operand" "r")
                                (match_operand:QI 4 "const_int_operand" "n"))
                        (const_int 1)
                        (match_operand:QI 3 "const_0_to_7_operand" "n")))]
  "INTVAL (operands[4]) & (1 << INTVAL (operands[3]))"
  {
    return avr_out_insert_notbit (insn, operands, NULL_RTX, NULL);
  }
  [(set_attr "adjust_len" "insv_notbit")
   (set_attr "cc" "clobber")])
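
A note on the insn condition of *insv.xor-extract: extracting bit $3 of
($2 ^ $4) yields the inverted bit ~$2.$3 precisely when bit $3 of the mask $4
is set, which is what "INTVAL (operands[4]) & (1 << INTVAL (operands[3]))"
requires.  A minimal C sketch (mask value and bit number are made up):

    unsigned char
    extract_inverted_bit5 (unsigned char x)
    {
      unsigned char mask = 0x28;                        /* bit 5 is set     */
      return (unsigned char) (((x ^ mask) >> 5) & 1u);  /* == (~x >> 5) & 1 */
    }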

;; Some combine patterns that try to fix bad code when a value is composed
;; from byte parts like in PR27663.
......