Commit b9f22704 by Jan van Male Committed by Andreas Jaeger

emit-rtl.c (adjust_address, [...]): Cast offset to unsigned HOST_WIDE_INT to avoid warning.

2001-08-08  Jan van Male <jan.vanmale@fenk.wau.nl>

        * emit-rtl.c (adjust_address, adjust_address_nv): Cast offset to
        unsigned HOST_WIDE_INT to avoid warning.
        * final.c (final): Cast INSN_UID to unsigned to avoid warning.
        * flow.c (set_block_for_new_insns): Likewise.

From-SVN: r44724
parent a4b07e7f
2001-08-08 Jan van Male <jan.vanmale@fenk.wau.nl>
* emit-rtl.c (adjust_address, adjust_address_nv): Cast offset to
unsigned HOST_WIDE_INT to avoid warning.
* final.c (final): Cast INSN_UID to unsigned to avoid warning.
* flow.c (set_block_for_new_insns): Likewise.
Wed Aug 8 21:08:14 CEST 2001 Jan Hubicka <jh@suse.cz> Wed Aug 8 21:08:14 CEST 2001 Jan Hubicka <jh@suse.cz>
* sibcall.c (return_value_pseudo): New static variable. * sibcall.c (return_value_pseudo): New static variable.
...@@ -93,7 +100,7 @@ Wed Aug 8 18:01:58 CEST 2001 Jan Hubicka <jh@suse.cz> ...@@ -93,7 +100,7 @@ Wed Aug 8 18:01:58 CEST 2001 Jan Hubicka <jh@suse.cz>
* config/ia64/ia64.md (cond_opsi2_internal, cond_opsi2_internal_b): * config/ia64/ia64.md (cond_opsi2_internal, cond_opsi2_internal_b):
Turn into define_insn_and_split. Turn into define_insn_and_split.
* sched-deps.c: Include "cselib.h". * sched-deps.c: Include "cselib.h".
(add_insn_mem_dependence, sched_analyze_1, sched_analyze_2): (add_insn_mem_dependence, sched_analyze_1, sched_analyze_2):
Use cselib to turn memory addresses into VALUEs. Use cselib to turn memory addresses into VALUEs.
...@@ -107,7 +114,7 @@ Wed Aug 8 18:01:58 CEST 2001 Jan Hubicka <jh@suse.cz> ...@@ -107,7 +114,7 @@ Wed Aug 8 18:01:58 CEST 2001 Jan Hubicka <jh@suse.cz>
* cselib.c (cselib_subst_to_values): No longer static. Allow MEMs * cselib.c (cselib_subst_to_values): No longer static. Allow MEMs
that have no value and autoincs to be handled by generating a new that have no value and autoincs to be handled by generating a new
dummy value. dummy value.
2001-08-08 Graham Stott <grahams@redhat.com> 2001-08-08 Graham Stott <grahams@redhat.com>
* final.c (shorten_branches): Update the INSN_ADDRESSES of insns * final.c (shorten_branches): Update the INSN_ADDRESSES of insns
......
...@@ -1643,7 +1643,8 @@ adjust_address (memref, mode, offset) ...@@ -1643,7 +1643,8 @@ adjust_address (memref, mode, offset)
object, we can merge it into the LO_SUM. */ object, we can merge it into the LO_SUM. */
if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
&& offset >= 0 && offset >= 0
&& offset < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT) && (unsigned HOST_WIDE_INT) offset
< GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0), addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0),
plus_constant (XEXP (addr, 1), offset)); plus_constant (XEXP (addr, 1), offset));
else else
...@@ -1668,7 +1669,8 @@ adjust_address_nv (memref, mode, offset) ...@@ -1668,7 +1669,8 @@ adjust_address_nv (memref, mode, offset)
object, we can merge it into the LO_SUM. */ object, we can merge it into the LO_SUM. */
if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
&& offset >= 0 && offset >= 0
&& offset < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT) && (unsigned HOST_WIDE_INT) offset
< GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0), addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0),
plus_constant (XEXP (addr, 1), offset)); plus_constant (XEXP (addr, 1), offset));
else else
......
...@@ -526,7 +526,7 @@ end_final (filename) ...@@ -526,7 +526,7 @@ end_final (filename)
} }
/* Default target function prologue and epilogue assembler output. /* Default target function prologue and epilogue assembler output.
If not overridden for epilogue code, then the function body itself If not overridden for epilogue code, then the function body itself
contains return instructions wherever needed. */ contains return instructions wherever needed. */
void void
...@@ -918,7 +918,7 @@ insn_current_reference_address (branch) ...@@ -918,7 +918,7 @@ insn_current_reference_address (branch)
return insn_current_address; return insn_current_address;
dest = JUMP_LABEL (branch); dest = JUMP_LABEL (branch);
/* BRANCH has no proper alignment chain set, so use SEQ. /* BRANCH has no proper alignment chain set, so use SEQ.
BRANCH also has no INSN_SHUID. */ BRANCH also has no INSN_SHUID. */
if (INSN_SHUID (seq) < INSN_SHUID (dest)) if (INSN_SHUID (seq) < INSN_SHUID (dest))
{ {
...@@ -1938,7 +1938,7 @@ final (first, file, optimize, prescan) ...@@ -1938,7 +1938,7 @@ final (first, file, optimize, prescan)
for (insn = NEXT_INSN (first); insn;) for (insn = NEXT_INSN (first); insn;)
{ {
#ifdef HAVE_ATTR_length #ifdef HAVE_ATTR_length
if (INSN_UID (insn) >= INSN_ADDRESSES_SIZE ()) if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
{ {
#ifdef STACK_REGS #ifdef STACK_REGS
/* Irritatingly, the reg-stack pass is creating new instructions /* Irritatingly, the reg-stack pass is creating new instructions
...@@ -2057,12 +2057,12 @@ final_scan_insn (insn, file, optimize, prescan, nopeepholes) ...@@ -2057,12 +2057,12 @@ final_scan_insn (insn, file, optimize, prescan, nopeepholes)
break; break;
case NOTE_INSN_PROLOGUE_END: case NOTE_INSN_PROLOGUE_END:
(*targetm.asm_out.function_end_prologue) (file); (*targetm.asm_out.function_end_prologue) (file);
profile_after_prologue (file); profile_after_prologue (file);
break; break;
case NOTE_INSN_EPILOGUE_BEG: case NOTE_INSN_EPILOGUE_BEG:
(*targetm.asm_out.function_begin_epilogue) (file); (*targetm.asm_out.function_begin_epilogue) (file);
break; break;
case NOTE_INSN_FUNCTION_BEG: case NOTE_INSN_FUNCTION_BEG:
...@@ -2749,7 +2749,7 @@ final_scan_insn (insn, file, optimize, prescan, nopeepholes) ...@@ -2749,7 +2749,7 @@ final_scan_insn (insn, file, optimize, prescan, nopeepholes)
print_rtl_single (asm_out_file, insn); print_rtl_single (asm_out_file, insn);
print_rtx_head = ""; print_rtx_head = "";
} }
if (! constrain_operands_cached (1)) if (! constrain_operands_cached (1))
fatal_insn_not_found (insn); fatal_insn_not_found (insn);
......
...@@ -366,7 +366,7 @@ typedef struct depth_first_search_dsS *depth_first_search_ds; ...@@ -366,7 +366,7 @@ typedef struct depth_first_search_dsS *depth_first_search_ds;
print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__) print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
/* Forward declarations */ /* Forward declarations */
static bool try_crossjump_to_edge PARAMS ((int, edge, edge)); static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
static bool try_crossjump_bb PARAMS ((int, basic_block)); static bool try_crossjump_bb PARAMS ((int, basic_block));
static bool outgoing_edges_match PARAMS ((basic_block, basic_block)); static bool outgoing_edges_match PARAMS ((basic_block, basic_block));
static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block, static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
...@@ -1783,7 +1783,7 @@ can_fallthru (src, target) ...@@ -1783,7 +1783,7 @@ can_fallthru (src, target)
/* Attempt to perform edge redirection by replacing possibly complex jump /* Attempt to perform edge redirection by replacing possibly complex jump
instruction by unconditional jump or removing jump completely. instruction by unconditional jump or removing jump completely.
This can apply only if all edges now point to the same block. This can apply only if all edges now point to the same block.
The parameters and return values are equivalent to redirect_edge_and_branch. The parameters and return values are equivalent to redirect_edge_and_branch.
*/ */
...@@ -2532,7 +2532,7 @@ need_fake_edge_p (insn) ...@@ -2532,7 +2532,7 @@ need_fake_edge_p (insn)
/* Add fake edges to the function exit for any non constant and non noreturn /* Add fake edges to the function exit for any non constant and non noreturn
calls, volatile inline assembly in the bitmap of blocks specified by calls, volatile inline assembly in the bitmap of blocks specified by
BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
that were split. that were split.
The goal is to expose cases in which entering a basic block does not imply The goal is to expose cases in which entering a basic block does not imply
that all subsequent instructions must be executed. */ that all subsequent instructions must be executed. */
...@@ -2560,7 +2560,7 @@ flow_call_edges_add (blocks) ...@@ -2560,7 +2560,7 @@ flow_call_edges_add (blocks)
} }
else else
{ {
EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
{ {
bbs[bb_num++] = BASIC_BLOCK (i); bbs[bb_num++] = BASIC_BLOCK (i);
if (i == n_basic_blocks - 1) if (i == n_basic_blocks - 1)
...@@ -2572,7 +2572,7 @@ flow_call_edges_add (blocks) ...@@ -2572,7 +2572,7 @@ flow_call_edges_add (blocks)
a fallthru edge to EXIT. Special care is required if the last insn a fallthru edge to EXIT. Special care is required if the last insn
of the last basic block is a call because make_edge folds duplicate of the last basic block is a call because make_edge folds duplicate
edges, which would result in the fallthru edge also being marked edges, which would result in the fallthru edge also being marked
fake, which would result in the fallthru edge being removed by fake, which would result in the fallthru edge being removed by
remove_fake_edges, which would result in an invalid CFG. remove_fake_edges, which would result in an invalid CFG.
Moreover, we can't elide the outgoing fake edge, since the block Moreover, we can't elide the outgoing fake edge, since the block
...@@ -3336,7 +3336,7 @@ try_simplify_condjump (cbranch_block) ...@@ -3336,7 +3336,7 @@ try_simplify_condjump (cbranch_block)
redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block); redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block);
cbranch_jump_edge->flags |= EDGE_FALLTHRU; cbranch_jump_edge->flags |= EDGE_FALLTHRU;
cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU; cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
/* Delete the block with the unconditional jump, and clean up the mess. */ /* Delete the block with the unconditional jump, and clean up the mess. */
flow_delete_block (jump_block); flow_delete_block (jump_block);
tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block); tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
...@@ -3363,7 +3363,7 @@ try_forward_edges (mode, b) ...@@ -3363,7 +3363,7 @@ try_forward_edges (mode, b)
next = e->succ_next; next = e->succ_next;
/* Skip complex edges because we don't know how to update them. /* Skip complex edges because we don't know how to update them.
Still handle fallthru edges, as we can succeed to forward fallthru Still handle fallthru edges, as we can succeed to forward fallthru
edge to the same place as the branch edge of conditional branch edge to the same place as the branch edge of conditional branch
and turn conditional branch to an unconditional branch. */ and turn conditional branch to an unconditional branch. */
...@@ -3386,13 +3386,13 @@ try_forward_edges (mode, b) ...@@ -3386,13 +3386,13 @@ try_forward_edges (mode, b)
/* Avoid killing of loop pre-headers, as it is the place loop /* Avoid killing of loop pre-headers, as it is the place loop
optimizer wants to hoist code to. optimizer wants to hoist code to.
For fallthru forwarders, the LOOP_BEG note must appear between For fallthru forwarders, the LOOP_BEG note must appear between
the header of block and CODE_LABEL of the loop, for non forwarders the header of block and CODE_LABEL of the loop, for non forwarders
it must appear before the JUMP_INSN. */ it must appear before the JUMP_INSN. */
if (mode & CLEANUP_PRE_LOOP) if (mode & CLEANUP_PRE_LOOP)
{ {
rtx insn = (target->succ->flags & EDGE_FALLTHRU rtx insn = (target->succ->flags & EDGE_FALLTHRU
? target->head : prev_nonnote_insn (target->end)); ? target->head : prev_nonnote_insn (target->end));
if (GET_CODE (insn) != NOTE) if (GET_CODE (insn) != NOTE)
...@@ -3630,7 +3630,7 @@ flow_find_cross_jump (mode, bb1, bb2, f1, f2) ...@@ -3630,7 +3630,7 @@ flow_find_cross_jump (mode, bb1, bb2, f1, f2)
/* Return true iff outgoing edges of BB1 and BB2 match, together with /* Return true iff outgoing edges of BB1 and BB2 match, together with
the branch instruction. This means that if we commonize the control the branch instruction. This means that if we commonize the control
flow before end of the basic block, the semantic remains unchanged. flow before end of the basic block, the semantic remains unchanged.
We may assume that there exists one edge with a common destination. */ We may assume that there exists one edge with a common destination. */
...@@ -3971,7 +3971,7 @@ try_crossjump_bb (mode, bb) ...@@ -3971,7 +3971,7 @@ try_crossjump_bb (mode, bb)
If there is a match, we'll do it the other way around. */ If there is a match, we'll do it the other way around. */
if (e == fallthru) if (e == fallthru)
continue; continue;
if (try_crossjump_to_edge (mode, e, fallthru)) if (try_crossjump_to_edge (mode, e, fallthru))
{ {
changed = true; changed = true;
...@@ -4013,7 +4013,7 @@ try_crossjump_bb (mode, bb) ...@@ -4013,7 +4013,7 @@ try_crossjump_bb (mode, bb)
/* The "first successor" check above only prevents multiple /* The "first successor" check above only prevents multiple
checks of crossjump(A,B). In order to prevent redundant checks of crossjump(A,B). In order to prevent redundant
checks of crossjump(B,A), require that A be the block checks of crossjump(B,A), require that A be the block
with the lowest index. */ with the lowest index. */
if (e->src->index > e2->src->index) if (e->src->index > e2->src->index)
continue; continue;
...@@ -4080,7 +4080,7 @@ try_optimize_cfg (mode) ...@@ -4080,7 +4080,7 @@ try_optimize_cfg (mode)
&& !(b->pred->flags & EDGE_COMPLEX) && !(b->pred->flags & EDGE_COMPLEX)
&& GET_CODE (b->head) == CODE_LABEL && GET_CODE (b->head) == CODE_LABEL
&& (!(mode & CLEANUP_PRE_SIBCALL) && (!(mode & CLEANUP_PRE_SIBCALL)
|| !tail_recursion_label_p (b->head)) || !tail_recursion_label_p (b->head))
/* If previous block ends with condjump jumping to next BB, /* If previous block ends with condjump jumping to next BB,
we can't delete the label. */ we can't delete the label. */
&& (b->pred->src == ENTRY_BLOCK_PTR && (b->pred->src == ENTRY_BLOCK_PTR
...@@ -6251,8 +6251,8 @@ mark_set_1 (pbi, code, reg, cond, insn, flags) ...@@ -6251,8 +6251,8 @@ mark_set_1 (pbi, code, reg, cond, insn, flags)
if (regno_first < FIRST_PSEUDO_REGISTER) if (regno_first < FIRST_PSEUDO_REGISTER)
{ {
regno_first += subreg_regno_offset (regno_first, inner_mode, regno_first += subreg_regno_offset (regno_first, inner_mode,
SUBREG_BYTE (reg), SUBREG_BYTE (reg),
outer_mode); outer_mode);
regno_last = (regno_first regno_last = (regno_first
+ HARD_REGNO_NREGS (regno_first, outer_mode) - 1); + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
...@@ -8277,7 +8277,7 @@ set_block_for_insn (insn, bb) ...@@ -8277,7 +8277,7 @@ set_block_for_insn (insn, bb)
/* When a new insn has been inserted into an existing block, it will /* When a new insn has been inserted into an existing block, it will
sometimes emit more than a single insn. This routine will set the sometimes emit more than a single insn. This routine will set the
block number for the specified insn, and look backwards in the insn block number for the specified insn, and look backwards in the insn
chain to see if there are any other uninitialized insns immediately chain to see if there are any other uninitialized insns immediately
previous to this one, and set the block number for them too. */ previous to this one, and set the block number for them too. */
void void
...@@ -8287,14 +8287,14 @@ set_block_for_new_insns (insn, bb) ...@@ -8287,14 +8287,14 @@ set_block_for_new_insns (insn, bb)
{ {
set_block_for_insn (insn, bb); set_block_for_insn (insn, bb);
/* Scan the previous instructions setting the block number until we find /* Scan the previous instructions setting the block number until we find
an instruction that has the block number set, or we find a note an instruction that has the block number set, or we find a note
of any kind. */ of any kind. */
for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn)) for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
{ {
if (GET_CODE (insn) == NOTE) if (GET_CODE (insn) == NOTE)
break; break;
if (INSN_UID (insn) >= basic_block_for_insn->num_elements if ((unsigned) INSN_UID (insn) >= basic_block_for_insn->num_elements
|| BLOCK_FOR_INSN (insn) == 0) || BLOCK_FOR_INSN (insn) == 0)
set_block_for_insn (insn, bb); set_block_for_insn (insn, bb);
else else
...@@ -8556,7 +8556,7 @@ verify_flow_info () ...@@ -8556,7 +8556,7 @@ verify_flow_info ()
num_bb_notes++; num_bb_notes++;
if (bb->index != last_bb_num_seen + 1) if (bb->index != last_bb_num_seen + 1)
internal_error ("Basic blocks not numbered consecutively."); internal_error ("Basic blocks not numbered consecutively.");
last_bb_num_seen = bb->index; last_bb_num_seen = bb->index;
} }
...@@ -9891,13 +9891,13 @@ flow_loop_scan (loops, loop, flags) ...@@ -9891,13 +9891,13 @@ flow_loop_scan (loops, loop, flags)
for (j = 0; j < loop->num_exits; j++) for (j = 0; j < loop->num_exits; j++)
sbitmap_a_and_b (loop->exits_doms, loop->exits_doms, sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
loops->cfg.dom[loop->exit_edges[j]->src->index]); loops->cfg.dom[loop->exit_edges[j]->src->index]);
/* The header of a natural loop must dominate /* The header of a natural loop must dominate
all exits. */ all exits. */
if (! TEST_BIT (loop->exits_doms, loop->header->index)) if (! TEST_BIT (loop->exits_doms, loop->header->index))
abort (); abort ();
} }
if (flags & LOOP_PRE_HEADER) if (flags & LOOP_PRE_HEADER)
{ {
/* Look to see if the loop has a pre-header node. */ /* Look to see if the loop has a pre-header node. */
...@@ -10301,7 +10301,7 @@ purge_dead_edges (bb) ...@@ -10301,7 +10301,7 @@ purge_dead_edges (bb)
} }
/* Search all basic blocks for potentially dead edges and purge them. /* Search all basic blocks for potentially dead edges and purge them.
Return true if some edge has been eliminated. Return true if some edge has been eliminated.
*/ */
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment