Commit 663522cb
Authored by Kazu Hirata on Aug 01, 2000
Committed by Jeff Law on Jul 31, 2000
* combine.c: Fix formatting.
From-SVN: r35398
parent c127c127
Showing 2 changed files with 89 additions and 90 deletions
gcc/ChangeLog   +2 -0
gcc/combine.c   +87 -90
gcc/ChangeLog
...
@@ -130,6 +130,8 @@
 2000-07-31  Kazu Hirata  <kazu@hxi.com>

 	* combine.c: Fix formatting.
 	* h8300.md: Fix formatting.
 	* local-alloc.c: Fix formatting.
...
gcc/combine.c
...
@@ -19,7 +19,6 @@ along with GNU CC; see the file COPYING.  If not, write to
 the Free Software Foundation, 59 Temple Place - Suite 330,
 Boston, MA 02111-1307, USA.  */

 /* This module is essentially the "combiner" phase of the U. of Arizona
    Portable Optimizer, but redone to work on our list-structured
    representation for RTL instead of their string representation.
...
@@ -443,7 +442,7 @@ static void record_promoted_value PARAMS ((rtx, rtx));
    the undo table.  */

 static void
-do_SUBST(into, newval)
+do_SUBST (into, newval)
      rtx *into, newval;
 {
   struct undo *buf;
...
@@ -472,7 +471,7 @@ do_SUBST(into, newval)
    not safe.  */

 static void
-do_SUBST_INT(into, newval)
+do_SUBST_INT (into, newval)
      int *into, newval;
 {
   struct undo *buf;
...
@@ -1527,7 +1526,7 @@ try_combine (i3, i2, i1, new_direct_jump_p)
 	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
       || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
 #endif
       )
     return 0;

   combine_attempts++;
...
@@ -2445,7 +2444,8 @@ try_combine (i3, i2, i1, new_direct_jump_p)
 	 they are adjacent to each other or not.  */
       {
 	rtx p = prev_nonnote_insn (i3);
-	if (p && p != i2 && GET_CODE (p) == INSN && newi2pat && sets_cc0_p (newi2pat))
+	if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
+	    && sets_cc0_p (newi2pat))
 	  {
 	    undo_all ();
 	    return 0;
...
@@ -2976,7 +2976,7 @@ find_split_point (loc, insn)
 	  SUBST (SET_SRC (x),
 		 gen_binary (IOR, mode,
 			     gen_binary (AND, mode, dest,
 					 GEN_INT (~(mask << pos)
 						  & GET_MODE_MASK (mode))),
 			     GEN_INT (src << pos)));
...
@@ -3871,7 +3871,7 @@ combine_simplify_rtx (x, op0_mode, last, in_dest)
 	return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
 				constm1_rtx);

-      /* (not (xor X C)) for C constant is (xor X D) with D = ~ C.  */
+      /* (not (xor X C)) for C constant is (xor X D) with D = ~C.  */
       if (GET_CODE (XEXP (x, 0)) == XOR
 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
 	  && (temp = simplify_unary_operation (NOT, mode,
...
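The rewrite named in that comment rests on the bitwise identity ~(X ^ C) == X ^ ~C. Below is a minimal standalone check of the identity in plain C; it is illustrative only and is not part of this commit or of combine.c:

#include <assert.h>
#include <limits.h>

int main (void)
{
  /* Exhaustive check over 8-bit operands; the identity is bit-parallel,
     so it holds at every width.  */
  for (unsigned x = 0; x <= UCHAR_MAX; x++)
    for (unsigned c = 0; c <= UCHAR_MAX; c++)
      assert ((~(x ^ c) & 0xff) == ((x ^ (~c & 0xff)) & 0xff));
  return 0;
}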
@@ -3942,7 +3942,7 @@ combine_simplify_rtx (x, op0_mode, last, in_dest)
 	in2 = XEXP (in2, 0);
       else if (GET_CODE (in2) == CONST_INT
 	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
-	in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
+	in2 = GEN_INT (GET_MODE_MASK (mode) & ~INTVAL (in2));
       else
 	in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
...
@@ -4092,14 +4092,14 @@ combine_simplify_rtx (x, op0_mode, last, in_dest)
 	 than HOST_BITS_PER_WIDE_INT.  */
       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
 	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
-	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~ GET_MODE_MASK (mode)) == 0)
+	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
 	return gen_lowpart_for_combine (mode, XEXP (x, 0));

       /* Similarly, a truncate of a register whose value is a
 	 comparison can be replaced with a subreg if STORE_FLAG_VALUE
 	 permits.  */
       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
-	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~ GET_MODE_MASK (mode)) == 0
+	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
 	  && (temp = get_last_value (XEXP (x, 0)))
 	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
 	return gen_lowpart_for_combine (mode, XEXP (x, 0));
...
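Both conditions gate on the same "fits in the narrow mode" test: masking a value with the complement of the mode mask and comparing against zero. A standalone sketch of that test, with illustrative names that are not combine.c's:

#include <assert.h>

/* Nonzero iff V has no bits set outside MASK, i.e. V would survive
   truncation to the mode whose bit mask is MASK.  */
static int fits_in_mask (unsigned long v, unsigned long mask)
{
  return (v & ~mask) == 0;
}

int main (void)
{
  assert (fits_in_mask (0x7f, 0xff));    /* fits in an 8-bit mode */
  assert (!fits_in_mask (0x100, 0xff));  /* bit 8 would be lost */
  return 0;
}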
@@ -4185,7 +4185,7 @@ combine_simplify_rtx (x, op0_mode, last, in_dest)
       if (GET_CODE (XEXP (x, 0)) == XOR
 	  && GET_CODE (XEXP (x, 1)) == CONST_INT
 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
-	  && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
+	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
 	  && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
 	      || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
...
@@ -4261,18 +4261,18 @@ combine_simplify_rtx (x, op0_mode, last, in_dest)
 	  && XEXP (x, 0) == const1_rtx
 	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
 	  && reversible_comparison_p (XEXP (x, 1)))
 	return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
 			   mode, XEXP (XEXP (x, 1), 0),
 			   XEXP (XEXP (x, 1), 1));

       /* (minus <foo> (and <foo> (const_int -pow2))) becomes
 	 (and <foo> (const_int pow2-1))  */
       if (GET_CODE (XEXP (x, 1)) == AND
 	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
-	  && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
+	  && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
 	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
 	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
-				       - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
+				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);

       /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
 	 integers.  */
...
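The minus/and rewrite above relies on the arithmetic identity x - (x & -p) == x & (p - 1) for a power of two p: in two's complement, x & -p clears the low log2(p) bits of x, so subtracting it leaves exactly those low bits. A minimal standalone check, illustrative only:

#include <assert.h>

int main (void)
{
  for (unsigned x = 0; x < 1024; x++)
    for (unsigned p = 1; p <= 256; p <<= 1)  /* powers of two */
      assert (x - (x & -p) == (x & (p - 1)));
  return 0;
}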
@@ -4503,7 +4503,6 @@ combine_simplify_rtx (x, op0_mode, last, in_dest)
 	      == 0)))
 	return XEXP (x, 0);

       /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
       if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
 	return gen_rtx_combine (NEG, mode, XEXP (x, 0));
...
@@ -4801,7 +4800,7 @@ simplify_if_then_else (x)
 	  && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
 	  && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
 	  && ((nonzero_bits (f, GET_MODE (f))
-	       & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
+	       & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
 	      == 0))
 	{
 	  c1 = XEXP (XEXP (t, 0), 1); z = f;
 	  op = GET_CODE (XEXP (t, 0));
...
@@ -4817,7 +4816,7 @@ simplify_if_then_else (x)
 	  && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
 	  && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
 	  && ((nonzero_bits (f, GET_MODE (f))
-	       & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
+	       & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
 	      == 0))
 	{
 	  c1 = XEXP (XEXP (t, 0), 0); z = f;
 	  op = GET_CODE (XEXP (t, 0));
...
@@ -4852,7 +4851,7 @@ simplify_if_then_else (x)
 	       && (i = exact_log2 (INTVAL (true))) >= 0)
 	      || ((num_sign_bit_copies (XEXP (cond, 0), mode)
 		   == GET_MODE_BITSIZE (mode))
-		  && (i = exact_log2 (- INTVAL (true))) >= 0)))
+		  && (i = exact_log2 (-INTVAL (true))) >= 0)))
 	return simplify_shift_const (NULL_RTX, ASHIFT, mode,
 				     gen_lowpart_for_combine (mode, XEXP (cond, 0)),
 				     i);
...
@@ -5221,7 +5220,7 @@ simplify_logical (x, last)
   switch (GET_CODE (x))
     {
     case AND:
-      /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
+      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
 	 insn (and may simplify more).  */
       if (GET_CODE (op0) == XOR
 	  && rtx_equal_p (XEXP (op0, 0), op1)
...
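The comment's claim is the bitwise identity (A ^ B) & A == A & ~B: wherever A is 0 both sides are 0, and wherever A is 1 the XOR reduces to ~B. A standalone check, illustrative only:

#include <assert.h>
#include <limits.h>

int main (void)
{
  for (unsigned a = 0; a <= UCHAR_MAX; a++)
    for (unsigned b = 0; b <= UCHAR_MAX; b++)
      assert (((a ^ b) & a) == (a & ~b & 0xff));
  return 0;
}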
@@ -5235,7 +5234,7 @@ simplify_logical (x, last)
 	x = gen_binary (AND, mode,
 			gen_unary (NOT, mode, mode, XEXP (op0, 0)),
 			op1);

-      /* Similarly for (~ (A ^ B)) & A.  */
+      /* Similarly for (~(A ^ B)) & A.  */
       if (GET_CODE (op0) == NOT
 	  && GET_CODE (XEXP (op0, 0)) == XOR
 	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
...
@@ -5267,7 +5266,7 @@ simplify_logical (x, last)
 	return gen_binary (IOR, mode,
 			   gen_binary (AND, mode, XEXP (op0, 0),
 				       GEN_INT (INTVAL (XEXP (op0, 1))
-						& ~ INTVAL (op1))), op1);
+						& ~INTVAL (op1))), op1);

       if (GET_CODE (x) != AND)
 	return x;
...
@@ -5339,7 +5338,7 @@ simplify_logical (x, last)
       /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
       if (GET_CODE (op1) == CONST_INT
 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
-	  && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
+	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
 	return op1;

       /* Convert (A & B) | A to A.  */
...
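The IOR rule above is absorption: if every bit that could be set in A is already set in C, i.e. (A & ~C) == 0, then A | C == C. A quick standalone check, illustrative only:

#include <assert.h>
#include <limits.h>

int main (void)
{
  for (unsigned a = 0; a <= UCHAR_MAX; a++)
    for (unsigned c = 0; c <= UCHAR_MAX; c++)
      if ((a & ~c) == 0)
        assert ((a | c) == c);
  return 0;
}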
@@ -5597,7 +5596,7 @@ expand_compound_operation (x)
   if (GET_CODE (x) == SIGN_EXTEND
       && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
 	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
-	       & ~ (((unsigned HOST_WIDE_INT)
+	       & ~(((unsigned HOST_WIDE_INT)
 		    GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
 		   >> 1))
 	      == 0)))
...
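The test here checks that the operand has no bits at or above the sign-bit position of its narrow mode (it masks with the complement of GET_MODE_MASK >> 1); in that case sign extension cannot change the value, so the SIGN_EXTEND can be collapsed. A standalone illustration for an 8-bit inner mode, illustrative only:

#include <assert.h>
#include <stdint.h>

int main (void)
{
  for (unsigned v = 0; v <= 0xff; v++)
    if ((v & ~(0xffu >> 1)) == 0)  /* no bit at or above bit 7 */
      assert ((int32_t) (int8_t) v == (int32_t) v);  /* sign ext is a no-op */
  return 0;
}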
@@ -5616,7 +5615,7 @@ expand_compound_operation (x)
       && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
       && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
       && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
-	  & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
+	  & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
     return XEXP (XEXP (x, 0), 0);

   /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
...
@@ -5625,7 +5624,7 @@ expand_compound_operation (x)
       && subreg_lowpart_p (XEXP (x, 0))
       && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
       && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
-	  & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
+	  & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
     return SUBREG_REG (XEXP (x, 0));

   /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
...
@@ -5638,7 +5637,7 @@ expand_compound_operation (x)
       && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
 	  <= HOST_BITS_PER_WIDE_INT)
       && ((HOST_WIDE_INT) STORE_FLAG_VALUE
-	  & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
+	  & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
     return XEXP (XEXP (x, 0), 0);

   /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
...
@@ -5649,7 +5648,7 @@ expand_compound_operation (x)
       && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
 	  <= HOST_BITS_PER_WIDE_INT)
       && ((HOST_WIDE_INT) STORE_FLAG_VALUE
-	  & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
+	  & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
     return SUBREG_REG (XEXP (x, 0));
 }
...
@@ -5689,7 +5688,6 @@ expand_compound_operation (x)
     /* Any other cases we can't handle.  */
     return x;

   /* If we couldn't do this for some reason, return the original
      expression.  */
   if (GET_CODE (tem) == CLOBBER)
...
@@ -5999,7 +5997,7 @@ make_extraction (mode, inner, pos, pos_rtx, len,
   if (flag_expensive_optimizations
       && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
 	  && ((nonzero_bits (new, tmode)
-	       & ~ (((unsigned HOST_WIDE_INT)
+	       & ~(((unsigned HOST_WIDE_INT)
 		    GET_MODE_MASK (tmode))
 		   >> 1))
 	      == 0)))
...
@@ -6218,7 +6216,7 @@ make_extraction (mode, inner, pos, pos_rtx, len,
   if (flag_expensive_optimizations
       && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
 	  && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
-	       & ~ (((unsigned HOST_WIDE_INT)
+	       & ~(((unsigned HOST_WIDE_INT)
 		    GET_MODE_MASK (GET_MODE (pos_rtx)))
 		   >> 1))
 	      == 0)))
...
@@ -6592,7 +6590,7 @@ get_pos_from_mask (m, plen)
      unsigned HOST_WIDE_INT *plen;
 {
   /* Get the bit number of the first 1 bit from the right, -1 if none.  */
-  int pos = exact_log2 (m & - m);
+  int pos = exact_log2 (m & -m);
   int len;

   if (pos < 0)
...
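In two's complement, m & -m isolates the lowest set bit of m, so taking exact_log2 of that value gives the bit number of the first 1 from the right, as the comment says (-1 when m is zero). A standalone illustration of the trick; __builtin_ctz here merely plays the role of exact_log2 (m & -m) and is not what combine.c uses:

#include <assert.h>

int main (void)
{
  unsigned m = 0xb8;          /* binary 1011 1000, lowest 1 at bit 3 */
  unsigned lowbit = m & -m;   /* two's-complement trick isolates bit 3 */
  assert (lowbit == 0x08);
  assert (__builtin_ctz (m) == 3);
  return 0;
}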
@@ -6676,7 +6674,7 @@ force_to_mode (x, mode, mask, reg, just_select)
 		   : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1));
   else
-    fuller_mask = ~ (HOST_WIDE_INT) 0;
+    fuller_mask = ~(HOST_WIDE_INT) 0;

   /* Determine what bits of X are guaranteed to be (non)zero.  */
   nonzero = nonzero_bits (x, mode);
...
@@ -6704,12 +6702,12 @@ force_to_mode (x, mode, mask, reg, just_select)
   /* If X is narrower than MODE and we want all the bits in X's mode, just
      get X in the proper mode.  */
   if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
-      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
+      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
     return gen_lowpart_for_combine (mode, x);

   /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
      MASK are already known to be zero in X, we need not do anything.  */
-  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
+  if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
     return x;

   switch (code)
...
@@ -6724,7 +6722,7 @@ force_to_mode (x, mode, mask, reg, just_select)
 	 spanned the boundary of the MEM.  If we are now masking so it is
 	 within that boundary, we don't need the USE any more.  */
       if (! BITS_BIG_ENDIAN
-	  && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
+	  && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
 	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
       break;
...
@@ -6752,7 +6750,7 @@ force_to_mode (x, mode, mask, reg, just_select)
 	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
 	      || (0 == (mask
 			& GET_MODE_MASK (GET_MODE (x))
-			& ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
+			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
 	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
       break;
...
@@ -6784,7 +6782,7 @@ force_to_mode (x, mode, mask, reg, just_select)
 	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
 	{
 	  HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
-				| (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
+				| (GET_MODE_MASK (GET_MODE (x)) & ~mask));
 	  int width = GET_MODE_BITSIZE (GET_MODE (x));
 	  rtx y;
...
@@ -6832,9 +6830,9 @@ force_to_mode (x, mode, mask, reg, just_select)
 	    int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
 	    unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);

-	    sp_mask &= ~ (sp_alignment - 1);
-	    if ((sp_mask & ~ smask) == 0
-		&& ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ smask) != 0)
+	    sp_mask &= ~(sp_alignment - 1);
+	    if ((sp_mask & ~smask) == 0
+		&& ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~smask) != 0)
 	      return force_to_mode (plus_constant (XEXP (x, 0),
 						   ((INTVAL (XEXP (x, 1)) - STACK_BIAS)
 						    & smask)
...
@@ -6842,8 +6840,8 @@ force_to_mode (x, mode, mask, reg, just_select)
 					  mode, smask, reg, next_select);
 	  }
 #endif

-	  if ((nonzero_bits (XEXP (x, 0), mode) & ~ smask) == 0
-	      && (INTVAL (XEXP (x, 1)) & ~ smask) != 0)
+	  if ((nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
+	      && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
 	    return force_to_mode (plus_constant (XEXP (x, 0),
 						 (INTVAL (XEXP (x, 1)) & smask)),
...
@@ -6898,7 +6896,7 @@ force_to_mode (x, mode, mask, reg, just_select)
 	       + floor_log2 (INTVAL (XEXP (x, 1))))
 	      < GET_MODE_BITSIZE (GET_MODE (x)))
 	  && (INTVAL (XEXP (x, 1))
-	      & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
+	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
 	{
 	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
 			  << INTVAL (XEXP (XEXP (x, 0), 1)));
...
@@ -6985,7 +6983,7 @@ force_to_mode (x, mode, mask, reg, just_select)
 	 in the mode of the shift and INNER_MASK is no wider than the
 	 width of OP_MODE.  */
       if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
-	  || (inner_mask & ~ GET_MODE_MASK (op_mode)) != 0)
+	  || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
 	op_mode = GET_MODE (x);

       inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
...
@@ -7045,7 +7043,7 @@ force_to_mode (x, mode, mask, reg, just_select)
       if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
 	{
-	  nonzero = ~ (HOST_WIDE_INT) 0;
+	  nonzero = ~(HOST_WIDE_INT) 0;

 	  /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
 	     is the number of bits a full-width mask would have set.
...
@@ -7064,7 +7062,7 @@ force_to_mode (x, mode, mask, reg, just_select)
 	      nonzero >>= INTVAL (XEXP (x, 1));
 	    }

-	  if ((mask & ~ nonzero) == 0
+	  if ((mask & ~nonzero) == 0
 	      || (i = exact_log2 (mask)) >= 0)
 	    {
 	      x = simplify_shift_const
...
@@ -7167,7 +7165,7 @@ force_to_mode (x, mode, mask, reg, just_select)
       /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
 	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
 	 which is equal to STORE_FLAG_VALUE.  */
-      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
+      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
 	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
 	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
 	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
...
@@ -7625,7 +7623,7 @@ make_field_assignment (x)
   else
     return x;

-  pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
+  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
   if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
       || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
       || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
...
@@ -7931,7 +7929,7 @@ nonzero_bits (x, mode)
       && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
     {
       nonzero &= nonzero_bits (x, GET_MODE (x));
-      nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
+      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
       return nonzero;
     }
 #endif
...
@@ -7979,7 +7977,7 @@ nonzero_bits (x, mode)
 	/* We must return here, otherwise we may get a worse result from
 	   one of the choices below.  There is nothing useful below as
 	   far as the stack pointer is concerned.  */
-	return nonzero &= ~ (sp_alignment - 1);
+	return nonzero &= ~(sp_alignment - 1);
       }
 #endif
...
@@ -8074,7 +8072,7 @@ nonzero_bits (x, mode)
 #endif

       if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
-	nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
+	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
       break;

     case ABS:
...
@@ -8109,7 +8107,7 @@ nonzero_bits (x, mode)
&
(((
HOST_WIDE_INT
)
1
<<
(
GET_MODE_BITSIZE
(
GET_MODE
(
XEXP
(
x
,
0
)))
-
1
))))
inner_nz
|=
(
GET_MODE_MASK
(
mode
)
&
~
GET_MODE_MASK
(
GET_MODE
(
XEXP
(
x
,
0
))));
&
~
GET_MODE_MASK
(
GET_MODE
(
XEXP
(
x
,
0
))));
}
nonzero
&=
inner_nz
;
...
...
@@ -8159,7 +8157,7 @@ nonzero_bits (x, mode)
{
int
sp_alignment
=
STACK_BOUNDARY
/
BITS_PER_UNIT
;
nz0
=
(
GET_MODE_MASK
(
mode
)
&
~
(
sp_alignment
-
1
));
nz0
=
(
GET_MODE_MASK
(
mode
)
&
~
(
sp_alignment
-
1
));
nz1
=
INTVAL
(
XEXP
(
x
,
1
))
-
STACK_BIAS
;
width0
=
floor_log2
(
nz0
)
+
1
;
width1
=
floor_log2
(
nz1
)
+
1
;
...
...
@@ -8201,7 +8199,7 @@ nonzero_bits (x, mode)
 	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
 	if (result_low > 0)
-	  nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
+	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
       }
       break;
...
@@ -8246,7 +8244,7 @@ nonzero_bits (x, mode)
if
(
GET_MODE_SIZE
(
GET_MODE
(
x
))
>
GET_MODE_SIZE
(
GET_MODE
(
SUBREG_REG
(
x
))))
nonzero
|=
(
GET_MODE_MASK
(
GET_MODE
(
x
))
&
~
GET_MODE_MASK
(
GET_MODE
(
SUBREG_REG
(
x
))));
&
~
GET_MODE_MASK
(
GET_MODE
(
SUBREG_REG
(
x
))));
}
}
break
;
...
...
@@ -8274,7 +8272,7 @@ nonzero_bits (x, mode)
unsigned
HOST_WIDE_INT
outer
=
0
;
if
(
mode_width
>
width
)
outer
=
(
op_nonzero
&
nonzero
&
~
mode_mask
);
outer
=
(
op_nonzero
&
nonzero
&
~
mode_mask
);
if
(
code
==
LSHIFTRT
)
inner
>>=
count
;
...
...
@@ -8419,7 +8417,7 @@ num_sign_bit_copies (x, mode)
nonzero
=
INTVAL
(
x
)
&
GET_MODE_MASK
(
mode
);
if
(
bitwidth
<=
HOST_BITS_PER_WIDE_INT
&&
(
nonzero
&
((
HOST_WIDE_INT
)
1
<<
(
bitwidth
-
1
)))
!=
0
)
nonzero
=
(
~
nonzero
)
&
GET_MODE_MASK
(
mode
);
nonzero
=
(
~
nonzero
)
&
GET_MODE_MASK
(
mode
);
return
(
nonzero
==
0
?
bitwidth
:
bitwidth
-
floor_log2
(
nonzero
)
-
1
);
...
...
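The computation above counts how many leading bits of a constant equal its sign bit: complement a negative value, then copies = bitwidth - floor_log2 (value) - 1, with value 0 meaning all bitwidth bits match. A standalone sketch of the same arithmetic for a 16-bit width; the function name is illustrative, not combine.c's:

#include <assert.h>

/* Leading sign-bit copies of V when read as a WIDTH-bit constant.  */
static int sign_bit_copies (unsigned long v, int width)
{
  unsigned long mask = (1ul << width) - 1;
  v &= mask;
  if (v & (1ul << (width - 1)))  /* negative: count leading ones */
    v = ~v & mask;               /* ... as leading zeros of ~v */
  if (v == 0)
    return width;
  int log2 = 0;                  /* floor_log2 by hand */
  while (v >>= 1)
    log2++;
  return width - log2 - 1;
}

int main (void)
{
  assert (sign_bit_copies (0xffff, 16) == 16);  /* -1: every bit is the sign */
  assert (sign_bit_copies (0xfffe, 16) == 15);  /* -2: fifteen leading ones */
  assert (sign_bit_copies (0x0001, 16) == 15);
  assert (sign_bit_copies (0x7fff, 16) == 1);
  return 0;
}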
@@ -8777,7 +8775,7 @@ merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
op0
=
AND
,
*
pcomp_p
=
1
;
else
/* op1 == IOR */
/* (a | b) ^ b == a & ~b */
op0
=
AND
,
*
pconst0
=
~
const0
;
op0
=
AND
,
*
pconst0
=
~
const0
;
break
;
case
AND
:
...
...
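The identity quoted in that comment, (a | b) ^ b == a & ~b, follows bit by bit: where b is 1 the OR forces a 1 and the XOR clears it; where b is 0 both sides reduce to a. A standalone check, illustrative only:

#include <assert.h>
#include <limits.h>

int main (void)
{
  for (unsigned a = 0; a <= UCHAR_MAX; a++)
    for (unsigned b = 0; b <= UCHAR_MAX; b++)
      assert (((a | b) ^ b) == (a & ~b & 0xff));
  return 0;
}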
@@ -9093,7 +9091,7 @@ simplify_shift_const (x, code, result_mode, varop, input_count)
/* C3 has the low-order C1 bits zero. */
mask
=
(
GET_MODE_MASK
(
mode
)
&
~
(((
HOST_WIDE_INT
)
1
<<
first_count
)
-
1
));
&
~
(((
HOST_WIDE_INT
)
1
<<
first_count
)
-
1
));
varop
=
simplify_and_const_int
(
NULL_RTX
,
result_mode
,
XEXP
(
varop
,
0
),
mask
);
...
...
@@ -9120,7 +9118,7 @@ simplify_shift_const (x, code, result_mode, varop, input_count)
signed_count
=
count
-
first_count
;
if
(
signed_count
<
0
)
count
=
-
signed_count
,
code
=
ASHIFT
;
count
=
-
signed_count
,
code
=
ASHIFT
;
else
count
=
signed_count
;
...
...
@@ -9199,7 +9197,7 @@ simplify_shift_const (x, code, result_mode, varop, input_count)
||
(
first_code
==
ASHIFTRT
&&
code
==
LSHIFTRT
)))
code
=
first_code
,
count
=
signed_count
;
else
if
(
signed_count
<
0
)
code
=
first_code
,
count
=
-
signed_count
;
code
=
first_code
,
count
=
-
signed_count
;
else
count
=
signed_count
;
...
...
@@ -9267,7 +9265,7 @@ simplify_shift_const (x, code, result_mode, varop, input_count)
&&
(
new
=
simplify_binary_operation
(
code
,
result_mode
,
XEXP
(
varop
,
1
),
GEN_INT
(
count
)))
!=
0
&&
GET_CODE
(
new
)
==
CONST_INT
&&
GET_CODE
(
new
)
==
CONST_INT
&&
merge_outer_ops
(
&
outer_op
,
&
outer_const
,
GET_CODE
(
varop
),
INTVAL
(
new
),
result_mode
,
&
complement_p
))
{
...
...
@@ -9884,7 +9882,7 @@ gen_binary (code, mode, op0, op1)
an AND. */
else
if
(
code
==
AND
&&
GET_CODE
(
op1
)
==
CONST_INT
&&
GET_MODE_BITSIZE
(
mode
)
<=
HOST_BITS_PER_WIDE_INT
&&
(
nonzero_bits
(
op0
,
mode
)
&
~
INTVAL
(
op1
))
==
0
)
&&
(
nonzero_bits
(
op0
,
mode
)
&
~
INTVAL
(
op1
))
==
0
)
return
op0
;
return
gen_rtx_combine
(
code
,
mode
,
op0
,
op1
);
...
...
@@ -9984,8 +9982,8 @@ simplify_comparison (code, pop0, pop1)
else
if
(
GET_CODE
(
op0
)
==
ASHIFT
)
mask
=
(
mask
&
(
mask
<<
shift_count
))
>>
shift_count
;
if
((
nonzero_bits
(
XEXP
(
op0
,
0
),
mode
)
&
~
mask
)
==
0
&&
(
nonzero_bits
(
XEXP
(
op1
,
0
),
mode
)
&
~
mask
)
==
0
)
if
((
nonzero_bits
(
XEXP
(
op0
,
0
),
mode
)
&
~
mask
)
==
0
&&
(
nonzero_bits
(
XEXP
(
op1
,
0
),
mode
)
&
~
mask
)
==
0
)
op0
=
XEXP
(
op0
,
0
),
op1
=
XEXP
(
op1
,
0
);
else
break
;
...
...
@@ -10180,7 +10178,7 @@ simplify_comparison (code, pop0, pop1)
break
;
case
GT
:
/* > C is equivalent to >= (C + 1); we do this for C < 0*/
/* > C is equivalent to >= (C + 1); we do this for C < 0
.
*/
if
(
const_op
<
0
)
{
const_op
+=
1
;
...
...
@@ -10421,7 +10419,7 @@ simplify_comparison (code, pop0, pop1)
continue
;
}
/*
... fall through ..
. */
/*
Fall through
. */
case
ABS
:
/* ABS is ignorable inside an equality comparison with zero. */
...
...
@@ -10432,7 +10430,6 @@ simplify_comparison (code, pop0, pop1)
}
break
;
case
SIGN_EXTEND
:
/* Can simplify (compare (zero/sign_extend FOO) CONST)
to (compare FOO CONST) if CONST fits in FOO's mode and we
...
...
@@ -10465,12 +10462,12 @@ simplify_comparison (code, pop0, pop1)
&&
GET_CODE
(
SUBREG_REG
(
op0
))
==
PLUS
&&
GET_CODE
(
XEXP
(
SUBREG_REG
(
op0
),
1
))
==
CONST_INT
&&
INTVAL
(
XEXP
(
SUBREG_REG
(
op0
),
1
))
<
0
&&
(
-
INTVAL
(
XEXP
(
SUBREG_REG
(
op0
),
1
))
<
(
HOST_WIDE_INT
)(
GET_MODE_MASK
(
mode
)
/
2
))
&&
(
-
INTVAL
(
XEXP
(
SUBREG_REG
(
op0
),
1
))
<
(
HOST_WIDE_INT
)
(
GET_MODE_MASK
(
mode
)
/
2
))
&&
(
unsigned
HOST_WIDE_INT
)
const_op
<
GET_MODE_MASK
(
mode
)
/
2
&&
(
0
==
(
nonzero_bits
(
XEXP
(
SUBREG_REG
(
op0
),
0
),
GET_MODE
(
SUBREG_REG
(
op0
)))
&
~
GET_MODE_MASK
(
mode
))
&
~
GET_MODE_MASK
(
mode
))
||
(
num_sign_bit_copies
(
XEXP
(
SUBREG_REG
(
op0
),
0
),
GET_MODE
(
SUBREG_REG
(
op0
)))
>
(
GET_MODE_BITSIZE
(
GET_MODE
(
SUBREG_REG
(
op0
)))
...
...
@@ -10657,7 +10654,7 @@ simplify_comparison (code, pop0, pop1)
{
mask
=
((
INTVAL
(
XEXP
(
op0
,
1
))
&
GET_MODE_MASK
(
mode
))
<<
INTVAL
(
XEXP
(
XEXP
(
op0
,
0
),
1
)));
if
((
~
STORE_FLAG_VALUE
&
mask
)
==
0
if
((
~
STORE_FLAG_VALUE
&
mask
)
==
0
&&
(
GET_RTX_CLASS
(
GET_CODE
(
XEXP
(
XEXP
(
op0
,
0
),
0
)))
==
'<'
||
((
tem
=
get_last_value
(
XEXP
(
XEXP
(
op0
,
0
),
0
)))
!=
0
&&
GET_RTX_CLASS
(
GET_CODE
(
tem
))
==
'<'
)))
...
...
@@ -10728,8 +10725,8 @@ simplify_comparison (code, pop0, pop1)
&&
mode_width
<=
HOST_BITS_PER_WIDE_INT
&&
(
GET_MODE_BITSIZE
(
GET_MODE
(
SUBREG_REG
(
XEXP
(
op0
,
0
))))
<=
HOST_BITS_PER_WIDE_INT
)
&&
(
INTVAL
(
XEXP
(
op0
,
1
))
&
~
mask
)
==
0
&&
0
==
(
~
GET_MODE_MASK
(
GET_MODE
(
SUBREG_REG
(
XEXP
(
op0
,
0
))))
&&
(
INTVAL
(
XEXP
(
op0
,
1
))
&
~
mask
)
==
0
&&
0
==
(
~
GET_MODE_MASK
(
GET_MODE
(
SUBREG_REG
(
XEXP
(
op0
,
0
))))
&
INTVAL
(
XEXP
(
op0
,
1
)))
&&
(
unsigned
HOST_WIDE_INT
)
INTVAL
(
XEXP
(
op0
,
1
))
!=
mask
&&
((
unsigned
HOST_WIDE_INT
)
INTVAL
(
XEXP
(
op0
,
1
))
...
...
@@ -10760,7 +10757,7 @@ simplify_comparison (code, pop0, pop1)
&
(((
HOST_WIDE_INT
)
1
<<
INTVAL
(
XEXP
(
op0
,
1
)))
-
1
))
==
0
)
&&
mode_width
<=
HOST_BITS_PER_WIDE_INT
&&
(
nonzero_bits
(
XEXP
(
op0
,
0
),
mode
)
&
~
(
mask
>>
(
INTVAL
(
XEXP
(
op0
,
1
))
&
~
(
mask
>>
(
INTVAL
(
XEXP
(
op0
,
1
))
+
!
equality_comparison_p
)))
==
0
)
{
/* We must perform a logical shift, not an arithmetic one,
...
...
@@ -10820,7 +10817,7 @@ simplify_comparison (code, pop0, pop1)
&&
(
tmode
=
mode_for_size
(
mode_width
-
INTVAL
(
XEXP
(
op0
,
1
)),
MODE_INT
,
1
))
!=
BLKmode
&&
((
unsigned
HOST_WIDE_INT
)
const_op
<=
GET_MODE_MASK
(
tmode
)
||
((
unsigned
HOST_WIDE_INT
)
-
const_op
||
((
unsigned
HOST_WIDE_INT
)
-
const_op
<=
GET_MODE_MASK
(
tmode
))))
{
op0
=
gen_lowpart_for_combine
(
tmode
,
XEXP
(
XEXP
(
op0
,
0
),
0
));
...
...
@@ -10839,7 +10836,7 @@ simplify_comparison (code, pop0, pop1)
&&
(
tmode
=
mode_for_size
(
mode_width
-
INTVAL
(
XEXP
(
op0
,
1
)),
MODE_INT
,
1
))
!=
BLKmode
&&
((
unsigned
HOST_WIDE_INT
)
const_op
<=
GET_MODE_MASK
(
tmode
)
||
((
unsigned
HOST_WIDE_INT
)
-
const_op
||
((
unsigned
HOST_WIDE_INT
)
-
const_op
<=
GET_MODE_MASK
(
tmode
))))
{
rtx
inner
=
XEXP
(
XEXP
(
XEXP
(
op0
,
0
),
0
),
0
);
...
...
@@ -10921,11 +10918,11 @@ simplify_comparison (code, pop0, pop1)
&&
(
GET_MODE_BITSIZE
(
GET_MODE
(
SUBREG_REG
(
op0
)))
<=
HOST_BITS_PER_WIDE_INT
)
&&
(
nonzero_bits
(
SUBREG_REG
(
op0
),
GET_MODE
(
SUBREG_REG
(
op0
)))
&
~
GET_MODE_MASK
(
GET_MODE
(
op0
)))
==
0
&
~
GET_MODE_MASK
(
GET_MODE
(
op0
)))
==
0
&&
(
tem
=
gen_lowpart_for_combine
(
GET_MODE
(
SUBREG_REG
(
op0
)),
op1
),
(
nonzero_bits
(
tem
,
GET_MODE
(
SUBREG_REG
(
op0
)))
&
~
GET_MODE_MASK
(
GET_MODE
(
op0
)))
==
0
))
&
~
GET_MODE_MASK
(
GET_MODE
(
op0
)))
==
0
))
op0
=
SUBREG_REG
(
op0
),
op1
=
tem
;
/* We now do the opposite procedure: Some machines don't have compare
...
...
@@ -10950,8 +10947,8 @@ simplify_comparison (code, pop0, pop1)
values, in which case it is true for all comparisons. */
if
(((
code
==
EQ
||
code
==
NE
||
code
==
GEU
||
code
==
GTU
||
code
==
LEU
||
code
==
LTU
)
&&
(
nonzero_bits
(
op0
,
tmode
)
&
~
GET_MODE_MASK
(
mode
))
==
0
&&
(
nonzero_bits
(
op1
,
tmode
)
&
~
GET_MODE_MASK
(
mode
))
==
0
)
&&
(
nonzero_bits
(
op0
,
tmode
)
&
~
GET_MODE_MASK
(
mode
))
==
0
&&
(
nonzero_bits
(
op1
,
tmode
)
&
~
GET_MODE_MASK
(
mode
))
==
0
)
||
((
num_sign_bit_copies
(
op0
,
tmode
)
>
GET_MODE_BITSIZE
(
tmode
)
-
GET_MODE_BITSIZE
(
mode
))
&&
(
num_sign_bit_copies
(
op1
,
tmode
)
...
...
@@ -11290,7 +11287,7 @@ record_promoted_value (insn, subreg)
if
(
GET_MODE_BITSIZE
(
mode
)
>
HOST_BITS_PER_WIDE_INT
)
return
;
for
(
links
=
LOG_LINKS
(
insn
);
links
;
)
for
(
links
=
LOG_LINKS
(
insn
);
links
;)
{
insn
=
XEXP
(
links
,
0
);
set
=
single_set
(
insn
);
...
...
@@ -11303,10 +11300,10 @@ record_promoted_value (insn, subreg)
continue
;
}
if
(
reg_last_set
[
regno
]
==
insn
)
if
(
reg_last_set
[
regno
]
==
insn
)
{
if
(
SUBREG_PROMOTED_UNSIGNED_P
(
subreg
))
reg_last_set_nonzero_bits
[
regno
]
&=
GET_MODE_MASK
(
mode
);
reg_last_set_nonzero_bits
[
regno
]
&=
GET_MODE_MASK
(
mode
);
}
if
(
GET_CODE
(
SET_SRC
(
set
))
==
REG
)
...
...
@@ -11336,7 +11333,7 @@ check_promoted_subreg (insn, x)
int
i
,
j
;
for
(
i
=
0
;
i
<
GET_RTX_LENGTH
(
GET_CODE
(
x
));
i
++
)
switch
(
format
[
i
])
switch
(
format
[
i
])
{
case
'e'
:
check_promoted_subreg
(
insn
,
XEXP
(
x
,
i
));
...
...
@@ -11738,7 +11735,6 @@ mark_used_regs_combine (x)
     }
 }

 /* Remove register number REGNO from the dead registers list of INSN.
    Return the note used to record the death, if there was one.  */
...
@@ -12228,9 +12224,9 @@ distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
 	      if (set != 0)
 		for (inner_dest = SET_DEST (set);
-		     GET_CODE (inner_dest) == STRICT_LOW_PART
-		     || GET_CODE (inner_dest) == SUBREG
-		     || GET_CODE (inner_dest) == ZERO_EXTRACT;
+		     (GET_CODE (inner_dest) == STRICT_LOW_PART
+		      || GET_CODE (inner_dest) == SUBREG
+		      || GET_CODE (inner_dest) == ZERO_EXTRACT);
 		     inner_dest = XEXP (inner_dest, 0))
 		  ;
...
@@ -12324,7 +12320,8 @@ distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
 	     i2.  */
 	  if (i2 && INSN_UID (place) <= max_uid_cuid
 	      && INSN_CUID (place) > INSN_CUID (i2)
-	      && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
+	      && from_insn
+	      && INSN_CUID (from_insn) > INSN_CUID (i2)
 	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
 	    {
 	      rtx links = LOG_LINKS (place);
...