Commit 77fa0940 by Richard Kenner

*** empty log message ***

From-SVN: r1223
parent 54d8c243
...@@ -356,6 +356,7 @@ static rtx expand_compound_operation (); ...@@ -356,6 +356,7 @@ static rtx expand_compound_operation ();
static rtx expand_field_assignment (); static rtx expand_field_assignment ();
static rtx make_extraction (); static rtx make_extraction ();
static int get_pos_from_mask (); static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx make_field_assignment (); static rtx make_field_assignment ();
static rtx make_compound_operation (); static rtx make_compound_operation ();
static rtx apply_distributive_law (); static rtx apply_distributive_law ();
...@@ -1295,7 +1296,8 @@ try_combine (i3, i2, i1) ...@@ -1295,7 +1296,8 @@ try_combine (i3, i2, i1)
if (undobuf.other_insn == 0 if (undobuf.other_insn == 0
&& (cc_use = find_single_use (SET_DEST (newpat), i3, && (cc_use = find_single_use (SET_DEST (newpat), i3,
&undobuf.other_insn)) &undobuf.other_insn))
&& ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use), i2src)) && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
i2src, const0_rtx))
!= GET_MODE (SET_DEST (newpat)))) != GET_MODE (SET_DEST (newpat))))
{ {
int regno = REGNO (SET_DEST (newpat)); int regno = REGNO (SET_DEST (newpat));
...@@ -2428,6 +2430,11 @@ subst (x, from, to, in_dest, unique_copy) ...@@ -2428,6 +2430,11 @@ subst (x, from, to, in_dest, unique_copy)
case '<': case '<':
temp = simplify_relational_operation (code, op0_mode, temp = simplify_relational_operation (code, op0_mode,
XEXP (x, 0), XEXP (x, 1)); XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
: immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
#endif
break; break;
case 'c': case 'c':
case '2': case '2':
...@@ -3194,7 +3201,7 @@ subst (x, from, to, in_dest, unique_copy) ...@@ -3194,7 +3201,7 @@ subst (x, from, to, in_dest, unique_copy)
#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES) #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
/* If this machine has CC modes other than CCmode, check to see /* If this machine has CC modes other than CCmode, check to see
if we need to use a different CC mode here. */ if we need to use a different CC mode here. */
compare_mode = SELECT_CC_MODE (new_code, op0); compare_mode = SELECT_CC_MODE (new_code, op0, op1);
/* If the mode changed, we have to change SET_DEST, the mode /* If the mode changed, we have to change SET_DEST, the mode
in the compare, and the mode in the place SET_DEST is used. in the compare, and the mode in the place SET_DEST is used.
...@@ -3636,25 +3643,6 @@ subst (x, from, to, in_dest, unique_copy) ...@@ -3636,25 +3643,6 @@ subst (x, from, to, in_dest, unique_copy)
case ASHIFTRT: case ASHIFTRT:
case ROTATE: case ROTATE:
case ROTATERT: case ROTATERT:
#ifdef SHIFT_COUNT_TRUNCATED
/* (*shift <X> (sign_extend <Y>)) = (*shift <X> <Y>) (most machines).
True for all kinds of shifts and also for zero_extend. */
if ((GET_CODE (XEXP (x, 1)) == SIGN_EXTEND
|| GET_CODE (XEXP (x, 1)) == ZERO_EXTEND)
&& FAKE_EXTEND_SAFE_P (mode, XEXP (XEXP (x, 1), 0)))
SUBST (XEXP (x, 1),
/* This is a perverse SUBREG, wider than its base. */
gen_lowpart_for_combine (mode, XEXP (XEXP (x, 1), 0)));
/* tege: Change (bitshifts ... (and ... mask), c)
to (bitshifts ... c) if mask just masks the bits the bitshift
insns do automatically on this machine. */
if (GET_CODE (XEXP (x, 1)) == AND
&& GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
&& (~ INTVAL (XEXP (XEXP (x, 1), 1)) & GET_MODE_MASK (mode)) == 0)
SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
#endif
/* If this is a shift by a constant amount, simplify it. */ /* If this is a shift by a constant amount, simplify it. */
if (GET_CODE (XEXP (x, 1)) == CONST_INT) if (GET_CODE (XEXP (x, 1)) == CONST_INT)
{ {
...@@ -3663,6 +3651,15 @@ subst (x, from, to, in_dest, unique_copy) ...@@ -3663,6 +3651,15 @@ subst (x, from, to, in_dest, unique_copy)
if (GET_CODE (x) != code) if (GET_CODE (x) != code)
goto restart; goto restart;
} }
#ifdef SHIFT_COUNT_TRUNCATED
else if (GET_CODE (XEXP (x, 1)) != REG)
SUBST (XEXP (x, 1),
force_to_mode (XEXP (x, 1), GET_MODE (x),
exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
0));
#endif
break; break;
} }
...@@ -4011,6 +4008,15 @@ make_extraction (mode, inner, pos, pos_rtx, len, ...@@ -4011,6 +4008,15 @@ make_extraction (mode, inner, pos, pos_rtx, len,
MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner); MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner); MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
} }
else if (GET_MODE (inner) == REG)
/* We can't call gen_lowpart_for_combine here since we always want
a SUBREG and it would sometimes return a new hard register. */
new = gen_rtx (SUBREG, tmode, inner,
(WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (is_mode) > UNITS_PER_WORD)
? ((GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (tmode)
/ UNITS_PER_WORD))
: 0);
else else
new = gen_lowpart_for_combine (tmode, inner); new = gen_lowpart_for_combine (tmode, inner);
...@@ -4019,7 +4025,9 @@ make_extraction (mode, inner, pos, pos_rtx, len, ...@@ -4019,7 +4025,9 @@ make_extraction (mode, inner, pos, pos_rtx, len,
if (in_dest) if (in_dest)
return (GET_CODE (new) == MEM ? new return (GET_CODE (new) == MEM ? new
: gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)); : (GET_CODE (new) != SUBREG
? gen_rtx (CLOBBER, tmode, const0_rtx)
: gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
/* Otherwise, sign- or zero-extend unless we already are in the /* Otherwise, sign- or zero-extend unless we already are in the
proper mode. */ proper mode. */
......
...@@ -1602,7 +1602,7 @@ ...@@ -1602,7 +1602,7 @@
(define_insn "ashrsi3" (define_insn "ashrsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r") [(set (match_operand:SI 0 "register_operand" "=r,r")
(ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0") (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0")
(match_operand:QI 2 "reg_or_cint_operand" "r,n")))] (match_operand:SI 2 "reg_or_cint_operand" "r,n")))]
"" ""
"@ "@
sar %0,%2 sar %0,%2
...@@ -1612,7 +1612,7 @@ ...@@ -1612,7 +1612,7 @@
(define_insn "lshrsi3" (define_insn "lshrsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r") [(set (match_operand:SI 0 "register_operand" "=r,r")
(lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0") (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0")
(match_operand:QI 2 "reg_or_cint_operand" "r,n")))] (match_operand:SI 2 "reg_or_cint_operand" "r,n")))]
"" ""
"@ "@
sr %0,%2 sr %0,%2
...@@ -1631,7 +1631,7 @@ ...@@ -1631,7 +1631,7 @@
(define_insn "ashlsi3" (define_insn "ashlsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r") [(set (match_operand:SI 0 "register_operand" "=r,r")
(ashift:SI (match_operand:SI 1 "register_operand" "0,0") (ashift:SI (match_operand:SI 1 "register_operand" "0,0")
(match_operand:QI 2 "reg_or_cint_operand" "r,n")))] (match_operand:SI 2 "reg_or_cint_operand" "r,n")))]
"" ""
"@ "@
sl %0,%2 sl %0,%2
......
...@@ -2083,6 +2083,26 @@ ...@@ -2083,6 +2083,26 @@
mt%0 %1" mt%0 %1"
[(set_attr "type" "*,load,*,*,*,*,*,mtlr")]) [(set_attr "type" "*,load,*,*,*,*,*,mtlr")])
;; Split a load of a large constant into the appropriate two-insn
;; sequence.
(define_split
[(set (match_operand:SI 0 "gpc_reg_operand" "")
(match_operand:SI 1 "const_int_operand" ""))]
"(unsigned) (INTVAL (operands[1]) + 0x8000) >= 0x10000
&& (INTVAL (operands[1]) & 0xffff) != 0"
[(set (match_dup 0)
(match_dup 2))
(set (match_dup 0)
(ior:SI (match_dup 0)
(match_dup 3)))]
"
{
operands[2] = gen_rtx (CONST_INT, VOIDmode,
INTVAL (operands[1]) & 0xffff0000);
operands[3] = gen_rtx (CONST_INT, VOIDmode, INTVAL (operands[1]) & 0xffff);
}")
(define_insn "" (define_insn ""
[(set (match_operand:CC 2 "cc_reg_operand" "=x") [(set (match_operand:CC 2 "cc_reg_operand" "=x")
(compare:CC (match_operand:SI 1 "gpc_reg_operand" "r") (compare:CC (match_operand:SI 1 "gpc_reg_operand" "r")
......
...@@ -2379,7 +2379,7 @@ canon_reg (x, insn) ...@@ -2379,7 +2379,7 @@ canon_reg (x, insn)
&& (((REGNO (new) < FIRST_PSEUDO_REGISTER) && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
!= (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER)) != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
|| (insn != 0 && insn_n_dups[recog_memoized (insn)] > 0))) || (insn != 0 && insn_n_dups[recog_memoized (insn)] > 0)))
validate_change (insn, &XEXP (x, i), new, 0); validate_change (insn, &XEXP (x, i), new, 1);
else else
XEXP (x, i) = new; XEXP (x, i) = new;
} }
...@@ -5359,6 +5359,7 @@ cse_insn (insn, in_libcall_block) ...@@ -5359,6 +5359,7 @@ cse_insn (insn, in_libcall_block)
else if (GET_CODE (SET_SRC (x)) == CALL) else if (GET_CODE (SET_SRC (x)) == CALL)
{ {
canon_reg (SET_SRC (x), insn); canon_reg (SET_SRC (x), insn);
apply_change_group ();
fold_rtx (SET_SRC (x), insn); fold_rtx (SET_SRC (x), insn);
invalidate (SET_DEST (x)); invalidate (SET_DEST (x));
} }
...@@ -5400,6 +5401,7 @@ cse_insn (insn, in_libcall_block) ...@@ -5400,6 +5401,7 @@ cse_insn (insn, in_libcall_block)
if (GET_CODE (SET_SRC (y)) == CALL) if (GET_CODE (SET_SRC (y)) == CALL)
{ {
canon_reg (SET_SRC (y), insn); canon_reg (SET_SRC (y), insn);
apply_change_group ();
fold_rtx (SET_SRC (y), insn); fold_rtx (SET_SRC (y), insn);
invalidate (SET_DEST (y)); invalidate (SET_DEST (y));
} }
...@@ -5428,6 +5430,7 @@ cse_insn (insn, in_libcall_block) ...@@ -5428,6 +5430,7 @@ cse_insn (insn, in_libcall_block)
else if (GET_CODE (y) == CALL) else if (GET_CODE (y) == CALL)
{ {
canon_reg (y, insn); canon_reg (y, insn);
apply_change_group ();
fold_rtx (y, insn); fold_rtx (y, insn);
} }
} }
...@@ -5449,6 +5452,7 @@ cse_insn (insn, in_libcall_block) ...@@ -5449,6 +5452,7 @@ cse_insn (insn, in_libcall_block)
else if (GET_CODE (x) == CALL) else if (GET_CODE (x) == CALL)
{ {
canon_reg (x, insn); canon_reg (x, insn);
apply_change_group ();
fold_rtx (x, insn); fold_rtx (x, insn);
} }
...@@ -5467,20 +5471,9 @@ cse_insn (insn, in_libcall_block) ...@@ -5467,20 +5471,9 @@ cse_insn (insn, in_libcall_block)
we don't break the duplicate nature of the pattern. So we will replace we don't break the duplicate nature of the pattern. So we will replace
both operands at the same time. Otherwise, we would fail to find an both operands at the same time. Otherwise, we would fail to find an
equivalent substitution in the loop calling validate_change below. equivalent substitution in the loop calling validate_change below.
(We also speed up that loop when a canonicalization was done since
recog_memoized need not be called for just a canonicalization unless
a pseudo register is being replaced by a hard reg or vice versa.)
We used to suppress canonicalization of DEST if it appears in SRC, We used to suppress canonicalization of DEST if it appears in SRC,
but we don't do this any more. but we don't do this any more. */
??? The way this code is written now, if we have a MATCH_DUP between
two operands that are pseudos and we would want to canonicalize them
to a hard register, we won't do that. The only time this would happen
is if the hard reg was a fixed register, and this should be rare.
??? This won't work if there is a MATCH_DUP between an input and an
output, but these never worked and must be declared invalid. */
for (i = 0; i < n_sets; i++) for (i = 0; i < n_sets; i++)
{ {
...@@ -5488,19 +5481,20 @@ cse_insn (insn, in_libcall_block) ...@@ -5488,19 +5481,20 @@ cse_insn (insn, in_libcall_block)
rtx src = SET_SRC (sets[i].rtl); rtx src = SET_SRC (sets[i].rtl);
rtx new = canon_reg (src, insn); rtx new = canon_reg (src, insn);
if (GET_CODE (new) == REG && GET_CODE (src) == REG if ((GET_CODE (new) == REG && GET_CODE (src) == REG
&& ((REGNO (new) < FIRST_PSEUDO_REGISTER) && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
!= (REGNO (src) < FIRST_PSEUDO_REGISTER))) != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
validate_change (insn, &SET_SRC (sets[i].rtl), new, 0); || insn_n_dups[recog_memoized (insn)] > 0)
validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
else else
SET_SRC (sets[i].rtl) = new; SET_SRC (sets[i].rtl) = new;
if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT) if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
{ {
validate_change (insn, &XEXP (dest, 1), validate_change (insn, &XEXP (dest, 1),
canon_reg (XEXP (dest, 1), insn), 0); canon_reg (XEXP (dest, 1), insn), 1);
validate_change (insn, &XEXP (dest, 2), validate_change (insn, &XEXP (dest, 2),
canon_reg (XEXP (dest, 2), insn), 0); canon_reg (XEXP (dest, 2), insn), 1);
} }
while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
...@@ -5512,6 +5506,14 @@ cse_insn (insn, in_libcall_block) ...@@ -5512,6 +5506,14 @@ cse_insn (insn, in_libcall_block)
canon_reg (dest, insn); canon_reg (dest, insn);
} }
/* Now that we have done all the replacements, we can apply the change
group and see if they all work. Note that this will cause some
canonicalizations that would have worked individually not to be applied
because some other canonicalization didn't work, but this should not
occur often. */
apply_change_group ();
/* Set sets[i].src_elt to the class each source belongs to. /* Set sets[i].src_elt to the class each source belongs to.
Detect assignments from or to volatile things Detect assignments from or to volatile things
and set set[i] to zero so they will be ignored and set set[i] to zero so they will be ignored
...@@ -6294,10 +6296,15 @@ cse_insn (insn, in_libcall_block) ...@@ -6294,10 +6296,15 @@ cse_insn (insn, in_libcall_block)
sets[i].src_elt = src_eqv_elt; sets[i].src_elt = src_eqv_elt;
invalidate_from_clobbers (&writes_memory, x); invalidate_from_clobbers (&writes_memory, x);
/* Memory, and some registers, are invalidated by subroutine calls. */
/* Some registers are invalidated by subroutine calls. Memory is
invalidated by non-constant calls. */
if (GET_CODE (insn) == CALL_INSN) if (GET_CODE (insn) == CALL_INSN)
{ {
static struct write_data everything = {0, 1, 1, 1}; static struct write_data everything = {0, 1, 1, 1};
if (! CONST_CALL_P (insn))
invalidate_memory (&everything); invalidate_memory (&everything);
invalidate_for_call (); invalidate_for_call ();
} }
...@@ -7672,7 +7679,7 @@ delete_dead_from_cse (insns, nreg) ...@@ -7672,7 +7679,7 @@ delete_dead_from_cse (insns, nreg)
int nreg; int nreg;
{ {
int *counts = (int *) alloca (nreg * sizeof (int)); int *counts = (int *) alloca (nreg * sizeof (int));
rtx insn; rtx insn, prev;
rtx tem; rtx tem;
int i; int i;
int in_libcall = 0; int in_libcall = 0;
...@@ -7685,14 +7692,16 @@ delete_dead_from_cse (insns, nreg) ...@@ -7685,14 +7692,16 @@ delete_dead_from_cse (insns, nreg)
/* Go from the last insn to the first and delete insns that only set unused /* Go from the last insn to the first and delete insns that only set unused
registers or copy a register to itself. As we delete an insn, remove registers or copy a register to itself. As we delete an insn, remove
usage counts for registers it uses. */ usage counts for registers it uses. */
for (insn = prev_real_insn (get_last_insn ()); for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
insn; insn = prev_real_insn (insn))
{ {
int live_insn = 0; int live_insn = 0;
prev = prev_real_insn (insn);
/* Don't delete any insns that are part of a libcall block. /* Don't delete any insns that are part of a libcall block.
Flow or loop might get confused if we did that. */ Flow or loop might get confused if we did that. Remember
if (find_reg_note (insn, REG_LIBCALL, 0)) that we are scanning backwards. */
if (find_reg_note (insn, REG_RETVAL, 0))
in_libcall = 1; in_libcall = 1;
if (in_libcall) if (in_libcall)
...@@ -7754,12 +7763,10 @@ delete_dead_from_cse (insns, nreg) ...@@ -7754,12 +7763,10 @@ delete_dead_from_cse (insns, nreg)
if (! live_insn) if (! live_insn)
{ {
count_reg_usage (insn, counts, -1); count_reg_usage (insn, counts, -1);
PUT_CODE (insn, NOTE); delete_insn (insn);
NOTE_SOURCE_FILE (insn) = 0;
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
} }
if (find_reg_note (insn, REG_RETVAL, 0)) if (find_reg_note (insn, REG_LIBCALL, 0))
in_libcall = 0; in_libcall = 0;
} }
} }
...@@ -6318,9 +6318,11 @@ compare_from_rtx (op0, op1, code, unsignedp, mode, size, align) ...@@ -6318,9 +6318,11 @@ compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
/* If this is a signed equality comparison, we can do it as an /* If this is a signed equality comparison, we can do it as an
unsigned comparison since zero-extension is cheaper than sign unsigned comparison since zero-extension is cheaper than sign
extension and comparisons with zero are done as unsigned. If we extension and comparisons with zero are done as unsigned. This is
are comparing against a constant, we must convert it to what it the case even on machines that can do fast sign extension, since
would look like unsigned. */ zero-extension is easier to combine with other operations than
sign-extension is. If we are comparing against a constant, we must
convert it to what it would look like unsigned. */
if ((code == EQ || code == NE) && ! unsignedp if ((code == EQ || code == NE) && ! unsignedp
&& GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_INT) && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_INT)
{ {
......
...@@ -952,9 +952,9 @@ fixup_unsigned_type (type) ...@@ -952,9 +952,9 @@ fixup_unsigned_type (type)
VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
mode meeting these conditions. mode meeting these conditions.
Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), if a mode Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
whose size is UNITS_PER_WORD meets all the conditions, it is returned the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
instead. */ all the conditions. */
enum machine_mode enum machine_mode
get_best_mode (bitsize, bitpos, align, largest_mode, volatilep) get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
...@@ -987,12 +987,25 @@ get_best_mode (bitsize, bitpos, align, largest_mode, volatilep) ...@@ -987,12 +987,25 @@ get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
|| (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode))) || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
return VOIDmode; return VOIDmode;
if (SLOW_BYTE_ACCESS if (SLOW_BYTE_ACCESS && ! volatilep)
&& ! volatilep {
&& BITS_PER_WORD <= MIN (align, BIGGEST_ALIGNMENT) enum machine_mode wide_mode = VOIDmode, tmode;
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
tmode = GET_MODE_WIDER_MODE (tmode))
{
unit = GET_MODE_BITSIZE (tmode);
if (bitpos / unit == (bitpos + bitsize - 1) / unit
&& unit <= BITS_PER_WORD
&& unit <= MIN (align, BIGGEST_ALIGNMENT)
&& (largest_mode == VOIDmode && (largest_mode == VOIDmode
|| BITS_PER_WORD <= GET_MODE_BITSIZE (largest_mode))) || unit <= GET_MODE_BITSIZE (largest_mode)))
return word_mode; wide_mode = tmode;
}
if (wide_mode != VOIDmode)
return wide_mode;
}
return mode; return mode;
} }
......
...@@ -1541,6 +1541,7 @@ const_hash (exp) ...@@ -1541,6 +1541,7 @@ const_hash (exp)
& ((1 << HASHBITS) - 1)) % MAX_HASH_TABLE; & ((1 << HASHBITS) - 1)) % MAX_HASH_TABLE;
for (link = CONSTRUCTOR_ELTS (exp); link; link = TREE_CHAIN (link)) for (link = CONSTRUCTOR_ELTS (exp); link; link = TREE_CHAIN (link))
if (TREE_VALUE (link))
hi = (hi * 603 + const_hash (TREE_VALUE (link))) % MAX_HASH_TABLE; hi = (hi * 603 + const_hash (TREE_VALUE (link))) % MAX_HASH_TABLE;
return hi; return hi;
...@@ -1677,8 +1678,22 @@ compare_constant_1 (exp, p) ...@@ -1677,8 +1678,22 @@ compare_constant_1 (exp, p)
} }
for (link = CONSTRUCTOR_ELTS (exp); link; link = TREE_CHAIN (link)) for (link = CONSTRUCTOR_ELTS (exp); link; link = TREE_CHAIN (link))
{
if (TREE_VALUE (link))
{
if ((p = compare_constant_1 (TREE_VALUE (link), p)) == 0) if ((p = compare_constant_1 (TREE_VALUE (link), p)) == 0)
return 0; return 0;
}
else
{
tree zero = 0;
if (bcmp (&zero, p, sizeof zero))
return 0;
p += sizeof zero;
}
}
return p; return p;
} }
else if (code == ADDR_EXPR) else if (code == ADDR_EXPR)
...@@ -1798,7 +1813,17 @@ record_constant_1 (exp) ...@@ -1798,7 +1813,17 @@ record_constant_1 (exp)
} }
for (link = CONSTRUCTOR_ELTS (exp); link; link = TREE_CHAIN (link)) for (link = CONSTRUCTOR_ELTS (exp); link; link = TREE_CHAIN (link))
{
if (TREE_VALUE (link))
record_constant_1 (TREE_VALUE (link)); record_constant_1 (TREE_VALUE (link));
else
{
tree zero = 0;
obstack_grow (&permanent_obstack, (char *) &zero, sizeof zero);
}
}
return; return;
} }
else if (code == ADDR_EXPR) else if (code == ADDR_EXPR)
...@@ -2520,8 +2545,7 @@ output_constant (exp, size) ...@@ -2520,8 +2545,7 @@ output_constant (exp, size)
/* Allow a constructor with no elements for any data type. /* Allow a constructor with no elements for any data type.
This means to fill the space with zeros. */ This means to fill the space with zeros. */
if (TREE_CODE (exp) == CONSTRUCTOR if (TREE_CODE (exp) == CONSTRUCTOR && CONSTRUCTOR_ELTS (exp) == 0)
&& TREE_OPERAND (exp, 1) == 0)
{ {
assemble_zeros (size); assemble_zeros (size);
return; return;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment