Commit e9d03821 by Jeff Law Committed by Jeff Law

reorg.c (stop_search_p): Handle DEBUG_INSN.

	* reorg.c (stop_search_p): Handle DEBUG_INSN.
	(redundant_insn, fill_simple_delay_slots): Likewise.
	(fill_slots_from_thread): Likewise.
	* resource.c (mark_referenced_resources): Likewise.
	(mark_set_resources, find_dead_or_set_registers): Likewise.

From-SVN: r258158
parent 2f03003d
2018-03-02 Jeff Law <law@redhat.com>
* reorg.c (stop_search_p): Handle DEBUG_INSN.
(redundant_insn, fill_simple_delay_slots): Likewise.
(fill_slots_from_thread): Likewise.
* resource.c (mark_referenced_resources): Likewise.
(mark_set_resources, find_dead_or_set_registers): Likewise.
2018-03-02 Jakub Jelinek <jakub@redhat.com>
* substring-locations.h (format_warning_va): Formatting fix for
......
...@@ -276,6 +276,7 @@ stop_search_p (rtx_insn *insn, int labels_p) ...@@ -276,6 +276,7 @@ stop_search_p (rtx_insn *insn, int labels_p)
{ {
case NOTE: case NOTE:
case CALL_INSN: case CALL_INSN:
case DEBUG_INSN:
return 0; return 0;
case CODE_LABEL: case CODE_LABEL:
...@@ -1493,6 +1494,9 @@ redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list) ...@@ -1493,6 +1494,9 @@ redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list)
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue; continue;
if (GET_CODE (trial) == DEBUG_INSN)
continue;
if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat)) if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
{ {
/* Stop for a CALL and its delay slots because it is difficult to /* Stop for a CALL and its delay slots because it is difficult to
...@@ -1588,6 +1592,9 @@ redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list) ...@@ -1588,6 +1592,9 @@ redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list)
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue; continue;
if (GET_CODE (trial) == DEBUG_INSN)
continue;
if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat)) if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
{ {
bool annul_p = false; bool annul_p = false;
...@@ -2020,6 +2027,10 @@ fill_simple_delay_slots (int non_jumps_p) ...@@ -2020,6 +2027,10 @@ fill_simple_delay_slots (int non_jumps_p)
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue; continue;
/* And DEBUG_INSNs never go into delay slots. */
if (GET_CODE (trial) == DEBUG_INSN)
continue;
/* Check for resource conflict first, to avoid unnecessary /* Check for resource conflict first, to avoid unnecessary
splitting. */ splitting. */
if (! insn_references_resource_p (trial, &set, true) if (! insn_references_resource_p (trial, &set, true)
...@@ -2142,6 +2153,10 @@ fill_simple_delay_slots (int non_jumps_p) ...@@ -2142,6 +2153,10 @@ fill_simple_delay_slots (int non_jumps_p)
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue; continue;
/* And DEBUG_INSNs do not go in delay slots. */
if (GET_CODE (trial) == DEBUG_INSN)
continue;
/* If this already has filled delay slots, get the insn needing /* If this already has filled delay slots, get the insn needing
the delay slots. */ the delay slots. */
if (GET_CODE (pat) == SEQUENCE) if (GET_CODE (pat) == SEQUENCE)
...@@ -2211,8 +2226,8 @@ fill_simple_delay_slots (int non_jumps_p) ...@@ -2211,8 +2226,8 @@ fill_simple_delay_slots (int non_jumps_p)
&& ! can_throw_internal (trial)) && ! can_throw_internal (trial))
{ {
/* See comment in relax_delay_slots about necessity of using /* See comment in relax_delay_slots about necessity of using
next_real_insn here. */ next_real_nondebug_insn here. */
rtx_insn *new_label = next_real_insn (next_trial); rtx_insn *new_label = next_real_nondebug_insn (next_trial);
if (new_label != 0) if (new_label != 0)
new_label = get_label_before (new_label, JUMP_LABEL (trial)); new_label = get_label_before (new_label, JUMP_LABEL (trial));
...@@ -2406,6 +2421,9 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition, ...@@ -2406,6 +2421,9 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue; continue;
if (GET_CODE (trial) == DEBUG_INSN)
continue;
/* If TRIAL conflicts with the insns ahead of it, we lose. Also, /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
don't separate or copy insns that set and use CC0. */ don't separate or copy insns that set and use CC0. */
if (! insn_references_resource_p (trial, &set, true) if (! insn_references_resource_p (trial, &set, true)
...@@ -3309,10 +3327,10 @@ relax_delay_slots (rtx_insn *first) ...@@ -3309,10 +3327,10 @@ relax_delay_slots (rtx_insn *first)
/* If the first insn at TARGET_LABEL is redundant with a previous /* If the first insn at TARGET_LABEL is redundant with a previous
insn, redirect the jump to the following insn and process again. insn, redirect the jump to the following insn and process again.
We use next_real_insn instead of next_active_insn so we We use next_real_nondebug_insn instead of next_active_insn so we
don't skip USE-markers, or we'll end up with incorrect don't skip USE-markers, or we'll end up with incorrect
liveness info. */ liveness info. */
trial = next_real_insn (target_label); trial = next_real_nondebug_insn (target_label);
if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
&& redundant_insn (trial, insn, vNULL) && redundant_insn (trial, insn, vNULL)
&& ! can_throw_internal (trial)) && ! can_throw_internal (trial))
...@@ -3327,7 +3345,7 @@ relax_delay_slots (rtx_insn *first) ...@@ -3327,7 +3345,7 @@ relax_delay_slots (rtx_insn *first)
{ {
/* Insert the special USE insn and update dataflow info. /* Insert the special USE insn and update dataflow info.
We know "trial" is an insn here as it is the output of We know "trial" is an insn here as it is the output of
next_real_insn () above. */ next_real_nondebug_insn () above. */
update_block (as_a <rtx_insn *> (trial), tmp); update_block (as_a <rtx_insn *> (trial), tmp);
/* Now emit a label before the special USE insn, and /* Now emit a label before the special USE insn, and
......
...@@ -212,6 +212,7 @@ mark_referenced_resources (rtx x, struct resources *res, ...@@ -212,6 +212,7 @@ mark_referenced_resources (rtx x, struct resources *res,
case PC: case PC:
case SYMBOL_REF: case SYMBOL_REF:
case LABEL_REF: case LABEL_REF:
case DEBUG_INSN:
return; return;
case SUBREG: case SUBREG:
...@@ -451,6 +452,7 @@ find_dead_or_set_registers (rtx_insn *target, struct resources *res, ...@@ -451,6 +452,7 @@ find_dead_or_set_registers (rtx_insn *target, struct resources *res,
case BARRIER: case BARRIER:
case NOTE: case NOTE:
case DEBUG_INSN:
continue; continue;
case INSN: case INSN:
...@@ -639,6 +641,7 @@ mark_set_resources (rtx x, struct resources *res, int in_dest, ...@@ -639,6 +641,7 @@ mark_set_resources (rtx x, struct resources *res, int in_dest,
case SYMBOL_REF: case SYMBOL_REF:
case CONST: case CONST:
case PC: case PC:
case DEBUG_INSN:
/* These don't set any resources. */ /* These don't set any resources. */
return; return;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment