Commit e4c6a07a by Bernd Schmidt Committed by Bernd Schmidt

re PR tree-optimization/42172 (inefficient bit fields assignments)

	PR target/42172
	* config/arm/arm.c (thumb1_rtx_costs): Improve support for SIGN_EXTEND
	and ZERO_EXTEND.
	(arm_rtx_costs_1): Likewise.
	(arm_size_rtx_costs): Use arm_rtx_costs_1 for these codes.
	* config/arm/arm.md (is_arch6): New attribute.
	(zero_extendhisi2, zero_extendqisi2, extendhisi2,
	extendqisi2): Tighten the code somewhat, preventing invalid
	RTL from occurring in the expander patterns.
	(thumb1_zero_extendhisi2): Merge with thumb1_zero_extendhisi2_v6.
	(thumb1_zero_extendhisi2_v6): Delete.
	(thumb1_extendhisi2): Merge with thumb1_extendhisi2_v6.
	(thumb1_extendhisi2_v6): Delete.
	(thumb1_extendqisi2): Merge with thumb1_extendqisi2_v6.
	(thumb1_extendqisi2_v6): Delete.
	(zero_extendhisi2 for register input splitter): New.
	(zero_extendqisi2 for register input splitter): New.
	(thumb1_extendhisi2 for register input splitter): New.
	(extendhisi2 for register input splitter): New.
	(extendqisi2 for register input splitter): New.
	(TARGET_THUMB1 extendqisi2 for memory input splitter): New.
	(arm_zero_extendhisi2): Allow nonimmediate_operand for operand 1,
	and add support for a register alternative requiring a split.
	(thumb1_zero_extendqisi2): Likewise.
	(arm_zero_extendqisi2): Likewise.
	(arm_extendhisi2): Likewise.
	(arm_extendqisi2): Likewise.

testsuite/
	PR target/42172
	* gcc.target/arm/pr42172-1.c: New test.

From-SVN: r161726
parent 18e8200f
...@@ -8,6 +8,34 @@ ...@@ -8,6 +8,34 @@
(compare_scc): Now a define_and_split. Add a number of extra (compare_scc): Now a define_and_split. Add a number of extra
splitters before it. splitters before it.
PR target/42172
* config/arm/arm.c (thumb1_rtx_costs): Improve support for SIGN_EXTEND
and ZERO_EXTEND.
(arm_rtx_costs_1): Likewise.
(arm_size_rtx_costs): Use arm_rtx_costs_1 for these codes.
* config/arm/arm.md (is_arch6): New attribute.
(zero_extendhisi2, zero_extendqisi2, extendhisi2,
extendqisi2): Tighten the code somewhat, preventing invalid
RTL from occurring in the expander patterns.
(thumb1_zero_extendhisi2): Merge with thumb1_zero_extendhisi2_v6.
(thumb1_zero_extendhisi2_v6): Delete.
(thumb1_extendhisi2): Merge with thumb1_extendhisi2_v6.
(thumb1_extendhisi2_v6): Delete.
(thumb1_extendqisi2): Merge with thumb1_extendqisi2_v6.
(thumb1_extendqisi2_v6): Delete.
(zero_extendhisi2 for register input splitter): New.
(zero_extendqisi2 for register input splitter): New.
(thumb1_extendhisi2 for register input splitter): New.
(extendhisi2 for register input splitter): New.
(extendqisi2 for register input splitter): New.
(TARGET_THUMB1 extendqisi2 for memory input splitter): New.
(arm_zero_extendhisi2): Allow nonimmediate_operand for operand 1,
and add support for a register alternative requiring a split.
(thumb1_zero_extendqisi2): Likewise.
(arm_zero_extendqisi2): Likewise.
(arm_extendhisi2): Likewise.
(arm_extendqisi2): Likewise.
2010-07-02 Sandra Loosemore <sandra@codesourcery.com> 2010-07-02 Sandra Loosemore <sandra@codesourcery.com>
* config/arm/arm.c (neon_vdup_constant): Expand into canonical RTL * config/arm/arm.c (neon_vdup_constant): Expand into canonical RTL
......
...@@ -6214,6 +6214,7 @@ static inline int ...@@ -6214,6 +6214,7 @@ static inline int
thumb1_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer) thumb1_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer)
{ {
enum machine_mode mode = GET_MODE (x); enum machine_mode mode = GET_MODE (x);
int total;
switch (code) switch (code)
{ {
...@@ -6312,24 +6313,20 @@ thumb1_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer) ...@@ -6312,24 +6313,20 @@ thumb1_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer)
return 14; return 14;
return 2; return 2;
case SIGN_EXTEND:
case ZERO_EXTEND: case ZERO_EXTEND:
/* XXX still guessing. */ total = mode == DImode ? COSTS_N_INSNS (1) : 0;
switch (GET_MODE (XEXP (x, 0))) total += thumb1_rtx_costs (XEXP (x, 0), GET_CODE (XEXP (x, 0)), code);
{
case QImode:
return (1 + (mode == DImode ? 4 : 0)
+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
case HImode: if (mode == SImode)
return (4 + (mode == DImode ? 4 : 0) return total;
+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
case SImode: if (arm_arch6)
return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0)); return total + COSTS_N_INSNS (1);
default: /* Assume a two-shift sequence. Increase the cost slightly so
return 99; we prefer actual shifts over an extend operation. */
} return total + 1 + COSTS_N_INSNS (2);
default: default:
return 99; return 99;
...@@ -6798,44 +6795,39 @@ arm_rtx_costs_1 (rtx x, enum rtx_code outer, int* total, bool speed) ...@@ -6798,44 +6795,39 @@ arm_rtx_costs_1 (rtx x, enum rtx_code outer, int* total, bool speed)
return false; return false;
case SIGN_EXTEND: case SIGN_EXTEND:
case ZERO_EXTEND:
*total = 0;
if (GET_MODE_CLASS (mode) == MODE_INT) if (GET_MODE_CLASS (mode) == MODE_INT)
{ {
*total = 0; rtx op = XEXP (x, 0);
enum machine_mode opmode = GET_MODE (op);
if (mode == DImode) if (mode == DImode)
*total += COSTS_N_INSNS (1); *total += COSTS_N_INSNS (1);
if (GET_MODE (XEXP (x, 0)) != SImode) if (opmode != SImode)
{ {
if (arm_arch6) if (MEM_P (op))
{ {
if (GET_CODE (XEXP (x, 0)) != MEM) /* If !arm_arch4, we use one of the extendhisi2_mem
*total += COSTS_N_INSNS (1); or movhi_bytes patterns for HImode. For a QImode
} sign extension, we first zero-extend from memory
else if (!arm_arch4 || GET_CODE (XEXP (x, 0)) != MEM) and then perform a shift sequence. */
if (!arm_arch4 && (opmode != QImode || code == SIGN_EXTEND))
*total += COSTS_N_INSNS (2); *total += COSTS_N_INSNS (2);
} }
else if (arm_arch6)
return false;
}
/* Fall through */
case ZERO_EXTEND:
*total = 0;
if (GET_MODE_CLASS (mode) == MODE_INT)
{
if (mode == DImode)
*total += COSTS_N_INSNS (1); *total += COSTS_N_INSNS (1);
if (GET_MODE (XEXP (x, 0)) != SImode) /* We don't have the necessary insn, so we need to perform some
{ other operation. */
if (arm_arch6) else if (TARGET_ARM && code == ZERO_EXTEND && mode == QImode)
{ /* An and with constant 255. */
if (GET_CODE (XEXP (x, 0)) != MEM)
*total += COSTS_N_INSNS (1); *total += COSTS_N_INSNS (1);
} else
else if (!arm_arch4 || GET_CODE (XEXP (x, 0)) != MEM) /* A shift sequence. Increase costs slightly to avoid
*total += COSTS_N_INSNS (GET_MODE (XEXP (x, 0)) == QImode ? combining two shifts into an extend operation. */
1 : 2); *total += COSTS_N_INSNS (2) + 1;
} }
return false; return false;
...@@ -7191,41 +7183,8 @@ arm_size_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer_code, ...@@ -7191,41 +7183,8 @@ arm_size_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer_code,
return false; return false;
case SIGN_EXTEND: case SIGN_EXTEND:
*total = 0;
if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) < 4)
{
if (!(arm_arch4 && MEM_P (XEXP (x, 0))))
*total += COSTS_N_INSNS (arm_arch6 ? 1 : 2);
}
if (mode == DImode)
*total += COSTS_N_INSNS (1);
return false;
case ZERO_EXTEND: case ZERO_EXTEND:
*total = 0; return arm_rtx_costs_1 (x, outer_code, total, 0);
if (!(arm_arch4 && MEM_P (XEXP (x, 0))))
{
switch (GET_MODE (XEXP (x, 0)))
{
case QImode:
*total += COSTS_N_INSNS (1);
break;
case HImode:
*total += COSTS_N_INSNS (arm_arch6 ? 1 : 2);
case SImode:
break;
default:
*total += COSTS_N_INSNS (2);
}
}
if (mode == DImode)
*total += COSTS_N_INSNS (1);
return false;
case CONST_INT: case CONST_INT:
if (const_ok_for_arm (INTVAL (x))) if (const_ok_for_arm (INTVAL (x)))
......
...@@ -148,6 +148,9 @@ ...@@ -148,6 +148,9 @@
; patterns that share the same RTL in both ARM and Thumb code. ; patterns that share the same RTL in both ARM and Thumb code.
(define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code"))) (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
(define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
;; Operand number of an input operand that is shifted. Zero if the ;; Operand number of an input operand that is shifted. Zero if the
;; given instruction does not shift one of its input operands. ;; given instruction does not shift one of its input operands.
(define_attr "shift" "" (const_int 0)) (define_attr "shift" "" (const_int 0))
...@@ -3987,93 +3990,46 @@ ...@@ -3987,93 +3990,46 @@
) )
(define_expand "zero_extendhisi2" (define_expand "zero_extendhisi2"
[(set (match_dup 2) [(set (match_operand:SI 0 "s_register_operand" "")
(ashift:SI (match_operand:HI 1 "nonimmediate_operand" "") (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
(const_int 16)))
(set (match_operand:SI 0 "s_register_operand" "")
(lshiftrt:SI (match_dup 2) (const_int 16)))]
"TARGET_EITHER" "TARGET_EITHER"
" {
{ if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
{
emit_insn (gen_rtx_SET (VOIDmode, operands[0],
gen_rtx_ZERO_EXTEND (SImode, operands[1])));
DONE;
}
if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
{ {
emit_insn (gen_movhi_bytes (operands[0], operands[1])); emit_insn (gen_movhi_bytes (operands[0], operands[1]));
DONE; DONE;
} }
if (!arm_arch6 && !MEM_P (operands[1]))
if (!s_register_operand (operands[1], HImode))
operands[1] = copy_to_mode_reg (HImode, operands[1]);
if (arm_arch6)
{ {
emit_insn (gen_rtx_SET (VOIDmode, operands[0], rtx t = gen_lowpart (SImode, operands[1]);
gen_rtx_ZERO_EXTEND (SImode, operands[1]))); rtx tmp = gen_reg_rtx (SImode);
emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
DONE; DONE;
} }
})
operands[1] = gen_lowpart (SImode, operands[1]); (define_split
operands[2] = gen_reg_rtx (SImode); [(set (match_operand:SI 0 "register_operand" "")
}" (zero_extend:SI (match_operand:HI 1 "register_operand" "l,m")))]
) "!TARGET_THUMB2 && !arm_arch6"
[(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
(set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
{
operands[2] = gen_lowpart (SImode, operands[1]);
})
(define_insn "*thumb1_zero_extendhisi2" (define_insn "*thumb1_zero_extendhisi2"
[(set (match_operand:SI 0 "register_operand" "=l")
(zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
"TARGET_THUMB1 && !arm_arch6"
"*
rtx mem = XEXP (operands[1], 0);
if (GET_CODE (mem) == CONST)
mem = XEXP (mem, 0);
if (GET_CODE (mem) == LABEL_REF)
return \"ldr\\t%0, %1\";
if (GET_CODE (mem) == PLUS)
{
rtx a = XEXP (mem, 0);
rtx b = XEXP (mem, 1);
/* This can happen due to bugs in reload. */
if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
{
rtx ops[2];
ops[0] = operands[0];
ops[1] = a;
output_asm_insn (\"mov %0, %1\", ops);
XEXP (mem, 0) = operands[0];
}
else if ( GET_CODE (a) == LABEL_REF
&& GET_CODE (b) == CONST_INT)
return \"ldr\\t%0, %1\";
}
return \"ldrh\\t%0, %1\";
"
[(set_attr "length" "4")
(set_attr "type" "load_byte")
(set_attr "pool_range" "60")]
)
(define_insn "*thumb1_zero_extendhisi2_v6"
[(set (match_operand:SI 0 "register_operand" "=l,l") [(set (match_operand:SI 0 "register_operand" "=l,l")
(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))] (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
"TARGET_THUMB1 && arm_arch6" "TARGET_THUMB1"
"* "*
rtx mem; rtx mem;
if (which_alternative == 0) if (which_alternative == 0 && arm_arch6)
return \"uxth\\t%0, %1\"; return \"uxth\\t%0, %1\";
if (which_alternative == 0)
return \"#\";
mem = XEXP (operands[1], 0); mem = XEXP (operands[1], 0);
...@@ -4107,20 +4063,25 @@ ...@@ -4107,20 +4063,25 @@
return \"ldrh\\t%0, %1\"; return \"ldrh\\t%0, %1\";
" "
[(set_attr "length" "2,4") [(set_attr_alternative "length"
[(if_then_else (eq_attr "is_arch6" "yes")
(const_int 2) (const_int 4))
(const_int 4)])
(set_attr "type" "alu_shift,load_byte") (set_attr "type" "alu_shift,load_byte")
(set_attr "pool_range" "*,60")] (set_attr "pool_range" "*,60")]
) )
(define_insn "*arm_zero_extendhisi2" (define_insn "*arm_zero_extendhisi2"
[(set (match_operand:SI 0 "s_register_operand" "=r") [(set (match_operand:SI 0 "s_register_operand" "=r,r")
(zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))] (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
"TARGET_ARM && arm_arch4 && !arm_arch6" "TARGET_ARM && arm_arch4 && !arm_arch6"
"ldr%(h%)\\t%0, %1" "@
[(set_attr "type" "load_byte") #
ldr%(h%)\\t%0, %1"
[(set_attr "type" "alu_shift,load_byte")
(set_attr "predicable" "yes") (set_attr "predicable" "yes")
(set_attr "pool_range" "256") (set_attr "pool_range" "*,256")
(set_attr "neg_pool_range" "244")] (set_attr "neg_pool_range" "*,244")]
) )
(define_insn "*arm_zero_extendhisi2_v6" (define_insn "*arm_zero_extendhisi2_v6"
...@@ -4150,50 +4111,49 @@ ...@@ -4150,50 +4111,49 @@
[(set (match_operand:SI 0 "s_register_operand" "") [(set (match_operand:SI 0 "s_register_operand" "")
(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))] (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
"TARGET_EITHER" "TARGET_EITHER"
" {
if (!arm_arch6 && GET_CODE (operands[1]) != MEM) if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
{
if (TARGET_ARM)
{ {
emit_insn (gen_andsi3 (operands[0], emit_insn (gen_andsi3 (operands[0],
gen_lowpart (SImode, operands[1]), gen_lowpart (SImode, operands[1]),
GEN_INT (255))); GEN_INT (255)));
DONE;
} }
else /* TARGET_THUMB */ if (!arm_arch6 && !MEM_P (operands[1]))
{ {
rtx temp = gen_reg_rtx (SImode); rtx t = gen_lowpart (SImode, operands[1]);
rtx ops[3]; rtx tmp = gen_reg_rtx (SImode);
emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
operands[1] = copy_to_mode_reg (QImode, operands[1]); emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
operands[1] = gen_lowpart (SImode, operands[1]); DONE;
ops[0] = temp;
ops[1] = operands[1];
ops[2] = GEN_INT (24);
emit_insn (gen_rtx_SET (VOIDmode, ops[0],
gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
ops[0] = operands[0];
ops[1] = temp;
ops[2] = GEN_INT (24);
emit_insn (gen_rtx_SET (VOIDmode, ops[0],
gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
} }
})
(define_split
[(set (match_operand:SI 0 "register_operand" "")
(zero_extend:SI (match_operand:QI 1 "register_operand" "")))]
"!arm_arch6"
[(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
(set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
{
operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
if (TARGET_ARM)
{
emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
DONE; DONE;
} }
" })
)
(define_insn "*thumb1_zero_extendqisi2" (define_insn "*thumb1_zero_extendqisi2"
[(set (match_operand:SI 0 "register_operand" "=l") [(set (match_operand:SI 0 "register_operand" "=l,l")
(zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))] (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
"TARGET_THUMB1 && !arm_arch6" "TARGET_THUMB1 && !arm_arch6"
"ldrb\\t%0, %1" "@
[(set_attr "length" "2") #
(set_attr "type" "load_byte") ldrb\\t%0, %1"
(set_attr "pool_range" "32")] [(set_attr "length" "4,2")
(set_attr "type" "alu_shift,load_byte")
(set_attr "pool_range" "*,32")]
) )
(define_insn "*thumb1_zero_extendqisi2_v6" (define_insn "*thumb1_zero_extendqisi2_v6"
...@@ -4209,14 +4169,17 @@ ...@@ -4209,14 +4169,17 @@
) )
(define_insn "*arm_zero_extendqisi2" (define_insn "*arm_zero_extendqisi2"
[(set (match_operand:SI 0 "s_register_operand" "=r") [(set (match_operand:SI 0 "s_register_operand" "=r,r")
(zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))] (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
"TARGET_ARM && !arm_arch6" "TARGET_ARM && !arm_arch6"
"ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2" "@
[(set_attr "type" "load_byte") #
ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
[(set_attr "length" "8,4")
(set_attr "type" "alu_shift,load_byte")
(set_attr "predicable" "yes") (set_attr "predicable" "yes")
(set_attr "pool_range" "4096") (set_attr "pool_range" "*,4096")
(set_attr "neg_pool_range" "4084")] (set_attr "neg_pool_range" "*,4084")]
) )
(define_insn "*arm_zero_extendqisi2_v6" (define_insn "*arm_zero_extendqisi2_v6"
...@@ -4295,108 +4258,42 @@ ...@@ -4295,108 +4258,42 @@
) )
(define_expand "extendhisi2" (define_expand "extendhisi2"
[(set (match_dup 2) [(set (match_operand:SI 0 "s_register_operand" "")
(ashift:SI (match_operand:HI 1 "nonimmediate_operand" "") (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
(const_int 16)))
(set (match_operand:SI 0 "s_register_operand" "")
(ashiftrt:SI (match_dup 2)
(const_int 16)))]
"TARGET_EITHER" "TARGET_EITHER"
" {
{
if (GET_CODE (operands[1]) == MEM)
{
if (TARGET_THUMB1) if (TARGET_THUMB1)
{ {
emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1])); emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
DONE; DONE;
} }
else if (arm_arch4) if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
{
emit_insn (gen_rtx_SET (VOIDmode, operands[0],
gen_rtx_SIGN_EXTEND (SImode, operands[1])));
DONE;
}
}
if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
{ {
emit_insn (gen_extendhisi2_mem (operands[0], operands[1])); emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
DONE; DONE;
} }
if (!s_register_operand (operands[1], HImode)) if (!arm_arch6 && !MEM_P (operands[1]))
operands[1] = copy_to_mode_reg (HImode, operands[1]);
if (arm_arch6)
{ {
if (TARGET_THUMB1) rtx t = gen_lowpart (SImode, operands[1]);
emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1])); rtx tmp = gen_reg_rtx (SImode);
else emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
emit_insn (gen_rtx_SET (VOIDmode, operands[0], emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
gen_rtx_SIGN_EXTEND (SImode, operands[1])));
DONE; DONE;
} }
})
operands[1] = gen_lowpart (SImode, operands[1]); (define_split
operands[2] = gen_reg_rtx (SImode); [(parallel
}" [(set (match_operand:SI 0 "register_operand" "")
) (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
(clobber (match_scratch:SI 2 ""))])]
(define_insn "thumb1_extendhisi2" "!arm_arch6"
[(set (match_operand:SI 0 "register_operand" "=l") [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
(sign_extend:SI (match_operand:HI 1 "memory_operand" "m"))) (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
(clobber (match_scratch:SI 2 "=&l"))] {
"TARGET_THUMB1 && !arm_arch6" operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
"* })
{
rtx ops[4];
rtx mem = XEXP (operands[1], 0);
/* This code used to try to use 'V', and fix the address only if it was
offsettable, but this fails for e.g. REG+48 because 48 is outside the
range of QImode offsets, and offsettable_address_p does a QImode
address check. */
if (GET_CODE (mem) == CONST)
mem = XEXP (mem, 0);
if (GET_CODE (mem) == LABEL_REF)
return \"ldr\\t%0, %1\";
if (GET_CODE (mem) == PLUS)
{
rtx a = XEXP (mem, 0);
rtx b = XEXP (mem, 1);
if (GET_CODE (a) == LABEL_REF
&& GET_CODE (b) == CONST_INT)
return \"ldr\\t%0, %1\";
if (GET_CODE (b) == REG)
return \"ldrsh\\t%0, %1\";
ops[1] = a;
ops[2] = b;
}
else
{
ops[1] = mem;
ops[2] = const0_rtx;
}
gcc_assert (GET_CODE (ops[1]) == REG);
ops[0] = operands[0];
ops[3] = operands[2];
output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
return \"\";
}"
[(set_attr "length" "4")
(set_attr "type" "load_byte")
(set_attr "pool_range" "1020")]
)
;; We used to have an early-clobber on the scratch register here. ;; We used to have an early-clobber on the scratch register here.
;; However, there's a bug somewhere in reload which means that this ;; However, there's a bug somewhere in reload which means that this
...@@ -4405,16 +4302,18 @@ ...@@ -4405,16 +4302,18 @@
;; we try to verify the operands. Fortunately, we don't really need ;; we try to verify the operands. Fortunately, we don't really need
;; the early-clobber: we can always use operand 0 if operand 2 ;; the early-clobber: we can always use operand 0 if operand 2
;; overlaps the address. ;; overlaps the address.
(define_insn "*thumb1_extendhisi2_insn_v6" (define_insn "thumb1_extendhisi2"
[(set (match_operand:SI 0 "register_operand" "=l,l") [(set (match_operand:SI 0 "register_operand" "=l,l")
(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m"))) (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
(clobber (match_scratch:SI 2 "=X,l"))] (clobber (match_scratch:SI 2 "=X,l"))]
"TARGET_THUMB1 && arm_arch6" "TARGET_THUMB1"
"* "*
{ {
rtx ops[4]; rtx ops[4];
rtx mem; rtx mem;
if (which_alternative == 0 && !arm_arch6)
return \"#\";
if (which_alternative == 0) if (which_alternative == 0)
return \"sxth\\t%0, %1\"; return \"sxth\\t%0, %1\";
...@@ -4462,7 +4361,10 @@ ...@@ -4462,7 +4361,10 @@
output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops); output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
return \"\"; return \"\";
}" }"
[(set_attr "length" "2,4") [(set_attr_alternative "length"
[(if_then_else (eq_attr "is_arch6" "yes")
(const_int 2) (const_int 4))
(const_int 4)])
(set_attr "type" "alu_shift,load_byte") (set_attr "type" "alu_shift,load_byte")
(set_attr "pool_range" "*,1020")] (set_attr "pool_range" "*,1020")]
) )
...@@ -4503,15 +4405,28 @@ ...@@ -4503,15 +4405,28 @@
}" }"
) )
(define_split
[(set (match_operand:SI 0 "register_operand" "")
(sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
"!arm_arch6"
[(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
(set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
{
operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
})
(define_insn "*arm_extendhisi2" (define_insn "*arm_extendhisi2"
[(set (match_operand:SI 0 "s_register_operand" "=r") [(set (match_operand:SI 0 "s_register_operand" "=r,r")
(sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))] (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
"TARGET_ARM && arm_arch4 && !arm_arch6" "TARGET_ARM && arm_arch4 && !arm_arch6"
"ldr%(sh%)\\t%0, %1" "@
[(set_attr "type" "load_byte") #
ldr%(sh%)\\t%0, %1"
[(set_attr "length" "8,4")
(set_attr "type" "alu_shift,load_byte")
(set_attr "predicable" "yes") (set_attr "predicable" "yes")
(set_attr "pool_range" "256") (set_attr "pool_range" "*,256")
(set_attr "neg_pool_range" "244")] (set_attr "neg_pool_range" "*,244")]
) )
;; ??? Check Thumb-2 pool range ;; ??? Check Thumb-2 pool range
...@@ -4573,46 +4488,45 @@ ...@@ -4573,46 +4488,45 @@
) )
(define_expand "extendqisi2" (define_expand "extendqisi2"
[(set (match_dup 2) [(set (match_operand:SI 0 "s_register_operand" "")
(ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "") (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
(const_int 24)))
(set (match_operand:SI 0 "s_register_operand" "")
(ashiftrt:SI (match_dup 2)
(const_int 24)))]
"TARGET_EITHER" "TARGET_EITHER"
" {
{ if (!arm_arch4 && MEM_P (operands[1]))
if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
{
emit_insn (gen_rtx_SET (VOIDmode, operands[0],
gen_rtx_SIGN_EXTEND (SImode, operands[1])));
DONE;
}
if (!s_register_operand (operands[1], QImode))
operands[1] = copy_to_mode_reg (QImode, operands[1]); operands[1] = copy_to_mode_reg (QImode, operands[1]);
if (arm_arch6) if (!arm_arch6 && !MEM_P (operands[1]))
{ {
emit_insn (gen_rtx_SET (VOIDmode, operands[0], rtx t = gen_lowpart (SImode, operands[1]);
gen_rtx_SIGN_EXTEND (SImode, operands[1]))); rtx tmp = gen_reg_rtx (SImode);
emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
DONE; DONE;
} }
})
operands[1] = gen_lowpart (SImode, operands[1]); (define_split
operands[2] = gen_reg_rtx (SImode); [(set (match_operand:SI 0 "register_operand" "")
}" (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
) "!arm_arch6"
[(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
(set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
{
operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
})
(define_insn "*arm_extendqisi" (define_insn "*arm_extendqisi"
[(set (match_operand:SI 0 "s_register_operand" "=r") [(set (match_operand:SI 0 "s_register_operand" "=r,r")
(sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))] (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
"TARGET_ARM && arm_arch4 && !arm_arch6" "TARGET_ARM && arm_arch4 && !arm_arch6"
"ldr%(sb%)\\t%0, %1" "@
[(set_attr "type" "load_byte") #
ldr%(sb%)\\t%0, %1"
[(set_attr "length" "8,4")
(set_attr "type" "alu_shift,load_byte")
(set_attr "predicable" "yes") (set_attr "predicable" "yes")
(set_attr "pool_range" "256") (set_attr "pool_range" "*,256")
(set_attr "neg_pool_range" "244")] (set_attr "neg_pool_range" "*,244")]
) )
(define_insn "*arm_extendqisi_v6" (define_insn "*arm_extendqisi_v6"
...@@ -4640,162 +4554,82 @@ ...@@ -4640,162 +4554,82 @@
(set_attr "predicable" "yes")] (set_attr "predicable" "yes")]
) )
(define_insn "*thumb1_extendqisi2" (define_split
[(set (match_operand:SI 0 "register_operand" "=l,l") [(set (match_operand:SI 0 "register_operand" "")
(sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))] (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
"TARGET_THUMB1 && !arm_arch6" "TARGET_THUMB1 && reload_completed"
"* [(set (match_dup 0) (match_dup 2))
{ (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
rtx ops[3]; {
rtx mem = XEXP (operands[1], 0); rtx addr = XEXP (operands[1], 0);
if (GET_CODE (mem) == CONST)
mem = XEXP (mem, 0);
if (GET_CODE (mem) == LABEL_REF)
return \"ldr\\t%0, %1\";
if (GET_CODE (mem) == PLUS if (GET_CODE (addr) == CONST)
&& GET_CODE (XEXP (mem, 0)) == LABEL_REF) addr = XEXP (addr, 0);
return \"ldr\\t%0, %1\";
if (which_alternative == 0) if (GET_CODE (addr) == PLUS
return \"ldrsb\\t%0, %1\"; && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
/* No split necessary. */
FAIL;
ops[0] = operands[0]; if (GET_CODE (addr) == PLUS
&& !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
FAIL;
if (GET_CODE (mem) == PLUS) if (reg_overlap_mentioned_p (operands[0], addr))
{ {
rtx a = XEXP (mem, 0); rtx t = gen_lowpart (QImode, operands[0]);
rtx b = XEXP (mem, 1); emit_move_insn (t, operands[1]);
emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
ops[1] = a; DONE;
ops[2] = b; }
if (GET_CODE (a) == REG) if (REG_P (addr))
{
if (GET_CODE (b) == REG)
output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
else if (REGNO (a) == REGNO (ops[0]))
{ {
output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops); addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
output_asm_insn (\"lsl\\t%0, %0, #24\", ops); operands[2] = const0_rtx;
output_asm_insn (\"asr\\t%0, %0, #24\", ops);
}
else
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
} }
else else if (GET_CODE (addr) != PLUS)
{ FAIL;
gcc_assert (GET_CODE (b) == REG); else if (REG_P (XEXP (addr, 0)))
if (REGNO (b) == REGNO (ops[0]))
{ {
output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops); operands[2] = XEXP (addr, 1);
output_asm_insn (\"lsl\\t%0, %0, #24\", ops); addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
output_asm_insn (\"asr\\t%0, %0, #24\", ops);
} }
else else
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
}
}
else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
{ {
output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops); operands[2] = XEXP (addr, 0);
output_asm_insn (\"lsl\\t%0, %0, #24\", ops); addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
output_asm_insn (\"asr\\t%0, %0, #24\", ops);
} }
else
{
ops[1] = mem;
ops[2] = const0_rtx;
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops); operands[3] = change_address (operands[1], QImode, addr);
} })
return \"\";
}"
[(set_attr "length" "2,6")
(set_attr "type" "load_byte,load_byte")
(set_attr "pool_range" "32,32")]
)
(define_insn "*thumb1_extendqisi2_v6" (define_insn "thumb1_extendqisi2"
[(set (match_operand:SI 0 "register_operand" "=l,l,l") [(set (match_operand:SI 0 "register_operand" "=l,l,l")
(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))] (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
"TARGET_THUMB1 && arm_arch6" "TARGET_THUMB1"
"* {
{ rtx addr;
rtx ops[3];
rtx mem;
if (which_alternative == 0)
return \"sxtb\\t%0, %1\";
mem = XEXP (operands[1], 0);
if (GET_CODE (mem) == CONST)
mem = XEXP (mem, 0);
if (GET_CODE (mem) == LABEL_REF)
return \"ldr\\t%0, %1\";
if (GET_CODE (mem) == PLUS
&& GET_CODE (XEXP (mem, 0)) == LABEL_REF)
return \"ldr\\t%0, %1\";
if (which_alternative == 0 && arm_arch6)
return "sxtb\\t%0, %1";
if (which_alternative == 0) if (which_alternative == 0)
return \"ldrsb\\t%0, %1\"; return "#";
ops[0] = operands[0]; addr = XEXP (operands[1], 0);
if (GET_CODE (addr) == PLUS
if (GET_CODE (mem) == PLUS) && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
{ return "ldrsb\\t%0, %1";
rtx a = XEXP (mem, 0);
rtx b = XEXP (mem, 1);
ops[1] = a;
ops[2] = b;
if (GET_CODE (a) == REG)
{
if (GET_CODE (b) == REG)
output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
else if (REGNO (a) == REGNO (ops[0]))
{
output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
output_asm_insn (\"sxtb\\t%0, %0\", ops);
}
else
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
}
else
{
gcc_assert (GET_CODE (b) == REG);
if (REGNO (b) == REGNO (ops[0]))
{
output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
output_asm_insn (\"sxtb\\t%0, %0\", ops);
}
else
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
}
}
else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
{
output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
output_asm_insn (\"sxtb\\t%0, %0\", ops);
}
else
{
ops[1] = mem;
ops[2] = const0_rtx;
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops); return "#";
} }
return \"\"; [(set_attr_alternative "length"
}" [(if_then_else (eq_attr "is_arch6" "yes")
[(set_attr "length" "2,2,4") (const_int 2) (const_int 4))
(set_attr "type" "alu_shift,load_byte,load_byte") (const_int 2)
(set_attr "pool_range" "*,32,32")] (if_then_else (eq_attr "is_arch6" "yes")
(const_int 4) (const_int 6))])
(set_attr "type" "alu_shift,load_byte,load_byte")]
) )
(define_expand "extendsfdf2" (define_expand "extendsfdf2"
......
...@@ -3,6 +3,9 @@ ...@@ -3,6 +3,9 @@
PR target/42835 PR target/42835
* gcc.target/arm/pr42835.c: New test. * gcc.target/arm/pr42835.c: New test.
PR target/42172
* gcc.target/arm/pr42172-1.c: New test.
2010-07-02 Paolo Carlini <paolo.carlini@oracle.com> 2010-07-02 Paolo Carlini <paolo.carlini@oracle.com>
* g++.dg/template/crash98.C: Remove stray // from dg-error comment. * g++.dg/template/crash98.C: Remove stray // from dg-error comment.
......
/* { dg-options "-O2" } */
struct A {
unsigned int f1 : 3;
unsigned int f2 : 3;
unsigned int f3 : 1;
unsigned int f4 : 1;
};
void init_A (struct A *this)
{
this->f1 = 0;
this->f2 = 1;
this->f3 = 0;
this->f4 = 0;
}
/* { dg-final { scan-assembler-times "ldr" 1 } } */
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment