/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.  For return insns, it contains either a
   RETURN or a SIMPLE_RETURN rtx.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "basic-block.h"
#include "expr.h"
#include "except.h"
#include "diagnostic-core.h"
#include "reload.h"
#include "predict.h"
#include "tree-pass.h"
#include "target.h"
Richard Kenner committed

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

67 68
static void init_label_info (rtx);
static void mark_all_labels (rtx);
69
static void mark_jump_label_1 (rtx, rtx, bool, bool);
70
static void mark_jump_label_asm (rtx, rtx);
71
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
72
static int invert_exp_1 (rtx, rtx);
73
static int returnjump_p_1 (rtx *, void *);
74

75 76 77
/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.  */
static void
rebuild_jump_labels_1 (rtx f, bool count_forced)
78
{
79
  rtx insn;
Richard Kenner committed
80

81
  timevar_push (TV_REBUILD_JUMP);
82
  init_label_info (f);
83
  mark_all_labels (f);
Richard Kenner committed
84

85 86 87
  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */
Richard Kenner committed
88

89 90 91 92
  if (count_forced)
    for (insn = forced_labels; insn; insn = XEXP (insn, 1))
      if (LABEL_P (XEXP (insn, 0)))
	LABEL_NUSES (XEXP (insn, 0))++;
93
  timevar_pop (TV_REBUILD_JUMP);
Jan Hubicka committed
94
}
95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx f)
{
  rebuild_jump_labels_1 (f, true);
}

/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the 
   main function chain.  */
void
rebuild_jump_labels_chain (rtx chain)
{
  rebuild_jump_labels_1 (chain, false);
}
Jan Hubicka committed
114

115 116 117 118 119 120 121 122
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.
 */
123
unsigned int
124
cleanup_barriers (void)
125 126 127 128 129
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
130
      if (BARRIER_P (insn))
131 132
	{
	  prev = prev_nonnote_insn (insn);
133 134
	  if (!prev)
	    continue;
135
	  if (BARRIER_P (prev))
136
	    delete_insn (insn);
137 138 139 140
	  else if (prev != PREV_INSN (insn))
	    reorder_insns (insn, insn, prev);
	}
    }
141
  return 0;
142
}
Richard Kenner committed
143

144
struct rtl_opt_pass pass_cleanup_barriers =
145
{
146 147
 {
  RTL_PASS,
148
  "barriers",                           /* name */
149 150 151 152 153
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
154
  TV_NONE,                              /* tv_id */
155 156 157 158
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
159
  0                                     /* todo_flags_finish */
160
 }
161 162
};

163

164 165 166 167
/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.  */

168
static void
169
init_label_info (rtx f)
170 171 172 173
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190
    {
      if (LABEL_P (insn))
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
	 sticky and not reset here; that way we won't lose association
	 with a label when e.g. the source for a target register
	 disappears out of reach for targets that may use jump-target
	 registers.  Jump transformations are supposed to transform
	 any REG_LABEL_TARGET notes.  The target label reference in a
	 branch may disappear from the branch (and from the
	 instruction before it) for other reasons, like register
	 allocation.  */

      if (INSN_P (insn))
	{
	  rtx note, next;
191

192 193 194 195 196 197 198 199 200
	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
    }
201 202
}

203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250
/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it.  */

static void
maybe_propagate_label_ref (rtx jump_insn, rtx prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
	  /* The source must be the direct LABEL_REF, not a
	     PLUS, UNSPEC, IF_THEN_ELSE etc.  */
	  && GET_CODE (SET_SRC (label_set)) == LABEL_REF
	  && (rtx_equal_p (label_dest, pc_src)
	      || (GET_CODE (pc_src) == IF_THEN_ELSE
		  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
		      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
	{
	  /* The CODE_LABEL referred to in the note must be the
	     CODE_LABEL in the LABEL_REF of the "set".  We can
	     conveniently use it for the marker function, which
	     requires a LABEL_REF wrapping.  */
	  gcc_assert (XEXP (label_note, 0) == XEXP (SET_SRC (label_set), 0));

	  mark_jump_label_1 (label_set, jump_insn, false, true);

	  gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
	}
    }
}

251
/* Mark the label each jump jumps to.
Jan Hubicka committed
252
   Combine consecutive labels, and count uses of labels.  */
253 254

static void
255
mark_all_labels (rtx f)
256 257 258
{
  rtx insn;

259 260 261 262 263
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB (bb)
	{
264 265 266 267 268 269 270 271 272 273 274 275 276
	  /* In cfglayout mode, we don't bother with trivial next-insn
	     propagation of LABEL_REFs into JUMP_LABEL.  This will be
	     handled by other optimizers using better algorithms.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      gcc_assert (! INSN_DELETED_P (insn));
	      if (NONDEBUG_INSN_P (insn))
	        mark_jump_label (PATTERN (insn), insn, 0);
	    }

	  /* In cfglayout mode, there may be non-insns between the
	     basic blocks.  If those non-insns represent tablejump data,
	     they contain label references that we must record.  */
277
	  for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
278 279 280 281 282
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
283
	  for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
284 285 286 287 288 289 290
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
	}
    }
291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312
  else
    {
      rtx prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (INSN_DELETED_P (insn))
	    ;
	  else if (LABEL_P (insn))
	    prev_nonjump_insn = NULL;
	  else if (NONDEBUG_INSN_P (insn))
	    {
	      mark_jump_label (PATTERN (insn), insn, 0);
	      if (JUMP_P (insn))
		{
		  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
		    maybe_propagate_label_ref (insn, prev_nonjump_insn);
		}
	      else
		prev_nonjump_insn = insn;
	    }
	}
    }
313
}
Richard Kenner committed
314

315
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
316 317 318 319 320 321
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether it's source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  */
enum rtx_code
322 323
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const_rtx insn)
Richard Kenner committed
324
{
325
  enum machine_mode mode;
Richard Kenner committed
326 327

  /* If this is not actually a comparison, we can't reverse it.  */
328 329
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
330 331 332 333 334 335
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

336 337 338
  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
339
  if (GET_MODE_CLASS (mode) == MODE_CC
340 341 342
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
Kazu Hirata committed
343
      return REVERSE_CONDITION (code, mode);
344
#else
Kazu Hirata committed
345
      return reverse_condition (code);
346
#endif
Kazu Hirata committed
347
    }
Richard Kenner committed
348

349
  /* Try a few special cases based on the comparison code.  */
350 351
  switch (code)
    {
Kazu Hirata committed
352 353 354 355 356 357 358
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
359
	 point.  Similarly the unsigned comparisons are never used for
Kazu Hirata committed
360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
377 378
    }

379
  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
Richard Kenner committed
380
    {
381
      const_rtx prev;
382 383 384 385
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will be never used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
386
      if (! insn)
387
	return UNKNOWN;
Kazu Hirata committed
388

389
      /* These CONST_CAST's are okay because prev_nonnote_insn just
390
	 returns its argument and we assign it to a const_rtx
391
	 variable.  */
392
      for (prev = prev_nonnote_insn (CONST_CAST_RTX(insn));
393
	   prev != 0 && !LABEL_P (prev);
394
	   prev = prev_nonnote_insn (CONST_CAST_RTX(prev)))
395
	{
396
	  const_rtx set = set_of (arg0, prev);
397 398 399 400
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);
Richard Kenner committed
401

402 403 404 405 406 407 408 409 410
	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
411
	      /* We can get past reg-reg moves.  This may be useful for model
412 413 414 415 416 417 418 419 420 421 422 423
	         of i387 comparisons that first move flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If register is clobbered in some ununderstandable way,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
Richard Kenner committed
424 425
    }

426 427
  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
Shujing Zhao committed
428
  if (CONST_INT_P (arg0)
429 430
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
431
	  && !HONOR_NANS (mode)))
432 433 434 435 436
    return reverse_condition (code);

  return UNKNOWN;
}

Kazu Hirata committed
437
/* A wrapper around the previous function to take COMPARISON as rtx
438 439
   expression.  This simplifies many callers.  */
enum rtx_code
440
reversed_comparison_code (const_rtx comparison, const_rtx insn)
441
{
442
  if (!COMPARISON_P (comparison))
443 444 445 446 447
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}
448 449 450 451

/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
452
reversed_comparison (const_rtx exp, enum machine_mode mode)
453 454 455 456 457 458 459 460 461
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}

462

463 464 465 466 467
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
Kazu Hirata committed
468
   of the special treatment of non-signaling nans in comparisons.
469
   Use reversed_comparison_code instead.  */
Richard Kenner committed
470 471

enum rtx_code
472
reverse_condition (enum rtx_code code)
Richard Kenner committed
473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
496 497 498 499 500 501 502 503 504 505
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
506
    case LTGT:
507
      return UNKNOWN;
Richard Kenner committed
508 509

    default:
510
      gcc_unreachable ();
Richard Kenner committed
511 512 513
    }
}

514 515 516 517 518
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
519
reverse_condition_maybe_unordered (enum rtx_code code)
520 521 522 523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
553
      gcc_unreachable ();
554 555 556
    }
}

Richard Kenner committed
557 558 559 560
/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
561
swap_condition (enum rtx_code code)
Richard Kenner committed
562 563 564 565 566
{
  switch (code)
    {
    case EQ:
    case NE:
567 568 569
    case UNORDERED:
    case ORDERED:
    case UNEQ:
570
    case LTGT:
Richard Kenner committed
571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
589 590 591 592 593 594 595 596 597
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

Richard Kenner committed
598
    default:
599
      gcc_unreachable ();
Richard Kenner committed
600 601 602 603 604 605 606 607
    }
}

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
608
unsigned_condition (enum rtx_code code)
Richard Kenner committed
609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
630
      gcc_unreachable ();
Richard Kenner committed
631 632 633 634 635 636
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
637
signed_condition (enum rtx_code code)
Richard Kenner committed
638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
659
      gcc_unreachable ();
Richard Kenner committed
660 661 662
    }
}

663
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
Richard Kenner committed
664 665 666
   truth of CODE1 implies the truth of CODE2.  */

int
667
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
Richard Kenner committed
668
{
669 670 671 672 673 674
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

Richard Kenner committed
675 676 677 678 679
  if (code1 == code2)
    return 1;

  switch (code1)
    {
680 681 682 683 684
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

Richard Kenner committed
685
    case EQ:
686 687
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
Richard Kenner committed
688 689 690
	return 1;
      break;

691 692 693 694 695
    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

Richard Kenner committed
696
    case LT:
697 698 699 700 701 702
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
Richard Kenner committed
703 704 705 706
	return 1;
      break;

    case GT:
707
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
708 709 710 711 712 713 714 715 716 717 718
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
Richard Kenner committed
719 720 721 722
	return 1;
      break;

    case LTU:
723
      if (code2 == LEU || code2 == NE)
Richard Kenner committed
724 725 726 727
	return 1;
      break;

    case GTU:
728
      if (code2 == GEU || code2 == NE)
Richard Kenner committed
729 730
	return 1;
      break;
731 732

    case UNORDERED:
733 734
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
735 736
	return 1;
      break;
Kazu Hirata committed
737

738 739
    default:
      break;
Richard Kenner committed
740 741 742 743 744 745 746 747
    }

  return 0;
}

/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
748
simplejump_p (const_rtx insn)
Richard Kenner committed
749
{
750
  return (JUMP_P (insn)
751 752 753
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
Richard Kenner committed
754 755 756
}

/* Return nonzero if INSN is a (possibly) conditional jump
Kazu Hirata committed
757 758
   and nothing more.

759
   Use of this function is deprecated, since we need to support combined
760
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
Richard Kenner committed
761 762

int
763
condjump_p (const_rtx insn)
Richard Kenner committed
764
{
765
  const_rtx x = PATTERN (insn);
Jeff Law committed
766 767 768

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
Richard Kenner committed
769
    return 0;
Jeff Law committed
770 771 772

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
773
    return 1;
Kazu Hirata committed
774 775 776 777
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
778
		     || ANY_RETURN_P (XEXP (x, 1))))
Kazu Hirata committed
779 780
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
781
			|| ANY_RETURN_P (XEXP (x, 2))))));
782 783
}

Jeff Law committed
784
/* Return nonzero if INSN is a (possibly) conditional jump inside a
785
   PARALLEL.
Kazu Hirata committed
786

787 788
   Use this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
789 790

int
791
condjump_in_parallel_p (const_rtx insn)
792
{
793
  const_rtx x = PATTERN (insn);
794 795 796 797 798 799 800 801 802 803 804 805 806 807 808

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
Richard Kenner committed
809
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
810
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
Richard Kenner committed
811 812 813
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
814
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
Richard Kenner committed
815 816 817 818
    return 1;
  return 0;
}

819 820
/* Return set of PC, otherwise NULL.  */

821
rtx
822
pc_set (const_rtx insn)
823 824
{
  rtx pat;
825
  if (!JUMP_P (insn))
826
    return NULL_RTX;
827
  pat = PATTERN (insn);
828 829 830 831 832

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
833 834
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;
835 836

  return NULL_RTX;
837 838
}

839 840 841
/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

842
int
843
any_uncondjump_p (const_rtx insn)
844
{
845
  const_rtx x = pc_set (insn);
846 847 848 849
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
850 851
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
852 853 854
  return 1;
}

855
/* Return true when insn is a conditional jump.  This function works for
856 857
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
858
   onlyjump_p.
859

860 861
   Note that unlike condjump_p it returns false for unconditional jumps.  */

862
int
863
any_condjump_p (const_rtx insn)
864
{
865
  const_rtx x = pc_set (insn);
866 867
  enum rtx_code a, b;

868 869
  if (!x)
    return 0;
870 871
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
872

873 874
  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));
875

876 877 878
  return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
	  || (a == PC
	      && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
879 880
}

881 882 883
/* Return the label of a conditional jump.  */

rtx
884
condjump_label (const_rtx insn)
885
{
886
  rtx x = pc_set (insn);
887

888
  if (!x)
889 890 891 892 893 894 895 896 897 898 899 900 901
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

902 903 904
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
905
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
906 907
{
  rtx x = *loc;
908

909 910 911 912 913 914
  if (x == NULL)
    return false;

  switch (GET_CODE (x))
    {
    case RETURN:
915
    case SIMPLE_RETURN:
916 917 918 919 920 921 922 923 924
    case EH_RETURN:
      return true;

    case SET:
      return SET_IS_RETURN_P (x);

    default:
      return false;
    }
925 926
}

927 928
/* Return TRUE if INSN is a return jump.  */

929
int
930
returnjump_p (rtx insn)
931
{
932
  if (!JUMP_P (insn))
933
    return 0;
934 935 936
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
eh_returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  return *loc && GET_CODE (*loc) == EH_RETURN;
}

int
eh_returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), eh_returnjump_p_1, NULL);
}

953 954 955 956
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
957
onlyjump_p (const_rtx insn)
958 959 960
{
  rtx set;

961
  if (!JUMP_P (insn))
962 963 964 965 966 967 968 969 970 971 972 973 974
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

975 976 977 978 979 980 981 982 983
/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   NULL or a return.  */
bool
jump_to_label_p (rtx insn)
{
  return (JUMP_P (insn)
	  && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}

984 985
#ifdef HAVE_cc0

986
/* Return nonzero if X is an RTX that only sets the condition codes
987 988 989
   and has no side effects.  */

int
990
only_sets_cc0_p (const_rtx x)
991 992 993 994 995 996 997 998 999 1000
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

Richard Kenner committed
1001 1002 1003 1004 1005 1006
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
1007
sets_cc0_p (const_rtx x)
Richard Kenner committed
1008
{
1009 1010 1011 1012 1013 1014
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

Richard Kenner committed
1015 1016 1017 1018 1019 1020 1021 1022 1023 1024 1025 1026 1027 1028 1029 1030 1031 1032 1033
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
1034
#endif
Richard Kenner committed
1035

1036 1037 1038 1039 1040 1041 1042 1043
/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
1044
   For returnjumps, the JUMP_LABEL will also be set as appropriate.
Richard Kenner committed
1045 1046 1047 1048 1049

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
1050
   that loop-optimization is done with.  */
Richard Kenner committed
1051

1052
void
1053
mark_jump_label (rtx x, rtx insn, int in_mem)
Richard Kenner committed
1054
{
1055 1056 1057 1058 1059 1060
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
1061 1062
}

1063
/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes can never contain a label reference; nothing to do.  */
    case PC:
    case CC0:
    case REG:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      /* For return jumps, the RETURN/SIMPLE_RETURN rtx itself is stored
	 in JUMP_LABEL; it must not conflict with a previous setting.  */
      if (is_target)
	{
	  gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
	  JUMP_LABEL (insn) = x;
	}
      return;

    case MEM:
      /* Label refs inside a MEM are operands, never direct jump
	 targets; remember that for the recursion below.  */
      in_mem = true;
      break;

    case SEQUENCE:
      /* Process each insn of a delay-slot sequence on its own behalf.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
			 XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	/* Do not count uses from insns that are already deleted.  */
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  /* Pass NULL_RTX so the vector itself never gets a JUMP_LABEL
	     or label notes; only the use counts are updated.  */
	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL_RTX, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}

1205 1206 1207 1208 1209 1210 1211 1212 1213 1214 1215 1216 1217 1218 1219 1220
/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
mark_jump_label_asm (rtx asmop, rtx insn)
{
  int idx;

  /* Inputs are plain operands, never jump targets.  */
  for (idx = ASM_OPERANDS_INPUT_LENGTH (asmop); idx-- > 0; )
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, idx), insn, false, false);

  /* asm goto labels are jump targets.  */
  for (idx = ASM_OPERANDS_LABEL_LENGTH (asmop); idx-- > 0; )
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, idx), insn, false, true);
}
Richard Kenner committed
1221

1222
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  /* Skip over insns already marked deleted to find the return value.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If this is a call, then we have to remove the var tracking note
     for the call arguments.  */

  if (CALL_P (insn)
      || (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
    {
      rtx p;

      /* Scan the following notes for the CALL_ARG_LOCATION note that
	 belongs to this call and remove it.  */
      for (p = next && INSN_DELETED_P (next) ? NEXT_INSN (next) : next;
	   p && NOTE_P (p);
	   p = NEXT_INSN (p))
	if (NOTE_KIND (p) == NOTE_INSN_CALL_ARG_LOCATION)
	  {
	    remove_insn (p);
	    break;
	  }
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Each table entry's label loses a use; delete the ones that
	 become entirely unused.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Find the preceding insn that is neither deleted nor a note, to
     check below whether INSN's label was preceded by a barrier.  */
  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);
  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
1372
delete_for_peephole (rtx from, rtx to)
Richard Kenner committed
1373
{
1374
  rtx insn = from;
Richard Kenner committed
1375 1376 1377

  while (1)
    {
1378 1379
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);
Richard Kenner committed
1380

1381
      if (!NOTE_P (insn))
Richard Kenner committed
1382 1383 1384 1385 1386 1387 1388 1389 1390 1391 1392 1393 1394 1395 1396 1397 1398 1399 1400 1401 1402 1403 1404 1405
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

1406 1407 1408 1409 1410 1411 1412 1413 1414 1415 1416 1417
/* A helper function for redirect_exp_1; examines its input X and returns
   either a LABEL_REF around a label, or a RETURN if X was NULL.  */
static rtx
redirect_target (rtx x)
{
  if (x == NULL_RTX)
    return ret_rtx;
  /* RETURN/SIMPLE_RETURN rtxes pass through unchanged; real labels
     are wrapped in a LABEL_REF.  */
  return ANY_RETURN_P (x) ? x : gen_rtx_LABEL_REF (Pmode, x);
}

1418 1419
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group
   (every validate_change below is called with in_group = 1); the
   caller decides whether to apply or cancel them.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  /* A direct reference to OLABEL: either a LABEL_REF wrapping it, or
     OLABEL itself (e.g. a RETURN rtx when OLABEL is a return).  */
  if ((code == LABEL_REF && XEXP (x, 0) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      /* A bare LABEL_REF cannot stand as a whole insn pattern; wrap it
	 in (set (pc) ...) when replacing the entire pattern.  */
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
 	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* Turning a direct jump to OLABEL into a return: replace the whole
     (set (pc) (label_ref OLABEL)) with the RETURN rtx.  */
  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  /* Generic recursion over all sub-rtxes.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
Richard Kenner committed
1470

1471 1472 1473 1474 1475
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
1476
redirect_jump_1 (rtx jump, rtx nlabel)
1477 1478
{
  int ochanges = num_validated_changes ();
1479
  rtx *loc, asmop;
Jan Hubicka committed
1480

1481
  gcc_assert (nlabel != NULL_RTX);
1482 1483 1484 1485 1486 1487 1488 1489 1490
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      if (nlabel == NULL)
	return 0;
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
Jan Hubicka committed
1491 1492 1493 1494 1495
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1496 1497 1498 1499 1500 1501
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.
Richard Kenner committed
1502

1503 1504 1505
   Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   in that case we are to turn the jump into a (possibly conditional)
   return insn.
Richard Kenner committed
1506

1507
   The return value will be 1 if the change was made, 0 if it wasn't
1508
   (this can only occur when trying to produce return insns).  */
Richard Kenner committed
1509 1510

int
1511
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
Richard Kenner committed
1512
{
1513
  rtx olabel = JUMP_LABEL (jump);
Richard Kenner committed
1514

1515 1516 1517 1518 1519 1520 1521 1522 1523 1524 1525 1526
  if (!nlabel)
    {
      /* If there is no label, we are asked to redirect to the EXIT block.
	 When before the epilogue is emitted, return/simple_return cannot be
	 created so we return 0 immediately.  After the epilogue is emitted,
	 we always expect a label, either a non-null label, or a
	 return/simple_return RTX.  */

      if (!epilogue_completed)
	return 0;
      gcc_unreachable ();
    }
1527

Richard Kenner committed
1528 1529 1530
  if (nlabel == olabel)
    return 1;

1531
  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
Richard Kenner committed
1532 1533
    return 0;

1534 1535 1536 1537 1538
  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.  If INVERT is nonzero, the jump condition
   was inverted as well, so any REG_EQUAL note and the branch
   probabilities must be adjusted accordingly.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to signal special behavior when moving
     the FUNCTION_END note.  Sanity check that no caller relies on that
     any more.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  /* RETURN/SIMPLE_RETURN rtxes have no use count.  */
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      /* Drop the note if the new target is a return (no label to point
	 at), or if inversion of the note's condition fails.  */
      if (ANY_RETURN_P (nlabel)
	  || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

1580 1581 1582 1583
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  rtx cond, tmp;
  enum rtx_code rev;

  /* Only an IF_THEN_ELSE can be inverted.  */
  if (GET_CODE (x) != IF_THEN_ELSE)
    return 0;

  cond = XEXP (x, 0);

  /* We can do this in two ways:  The preferable way, which can only
     be done if this is not an integer comparison, is to reverse
     the comparison code.  Otherwise, swap the THEN-part and ELSE-part
     of the IF_THEN_ELSE.  If we can't do either, fail.  */

  rev = reversed_comparison_code (cond, insn);
  if (rev != UNKNOWN)
    {
      /* Replace the condition with its reversed form.  */
      validate_change (insn, &XEXP (x, 0),
		       gen_rtx_fmt_ee (rev, GET_MODE (cond),
				       XEXP (cond, 0), XEXP (cond, 1)),
		       1);
      return 1;
    }

  /* Fall back to exchanging the two arms.  */
  tmp = XEXP (x, 1);
  validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
  validate_change (insn, &XEXP (x, 2), tmp, 1);
  return 1;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
1625
invert_jump_1 (rtx jump, rtx nlabel)
1626
{
1627
  rtx x = pc_set (jump);
1628
  int ochanges;
1629
  int ok;
1630 1631

  ochanges = num_validated_changes ();
1632 1633
  if (x == NULL)
    return 0;
1634 1635
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);
H.J. Lu committed
1636

1637 1638 1639
  if (num_validated_changes () == ochanges)
    return 0;

1640 1641 1642
  /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1643 1644 1645 1646 1647 1648
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
1649
invert_jump (rtx jump, rtx nlabel, int delete_unused)
1650
{
1651
  rtx olabel = JUMP_LABEL (jump);
1652

1653
  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1654
    {
1655
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1656 1657
      return 1;
    }
1658
  cancel_changes (0);
1659 1660 1661
  return 0;
}

Richard Kenner committed
1662 1663

/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Registers (possibly wrapped in SUBREGs) are compared by their
     post-renumbering hard register number and subreg byte offset.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
				  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      /* Fold the subreg of the renumbered hard register into a
		 plain hard register number, if representable.  */
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
		return 0;
	      reg_x = info.offset;
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
		return 0;
	      reg_y = info.offset;
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      /* These are unique objects; pointer inequality (checked above)
	 means they are different.  */
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      /* Symbol names are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
	      /* The vector-length/id operands of ASM_OPERANDS and
		 ASM_INPUT are allowed to differ.  */
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1)))
		break;
	      return 0;
	    }
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* 'u' operands are insn/label pointers; compare by identity.  */
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
1867
true_regnum (const_rtx x)
Richard Kenner committed
1868
{
1869
  if (REG_P (x))
Richard Kenner committed
1870 1871 1872 1873 1874 1875 1876 1877
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
1878
      if (base >= 0
1879 1880 1881 1882 1883 1884 1885 1886 1887 1888 1889
	  && base < FIRST_PSEUDO_REGISTER)
	{
	  struct subreg_info info;

	  subreg_get_info (REGNO (SUBREG_REG (x)),
			   GET_MODE (SUBREG_REG (x)),
			   SUBREG_BYTE (x), GET_MODE (x), &info);

	  if (info.representable_p)
	    return base + info.offset;
	}
Richard Kenner committed
1890 1891 1892
    }
  return -1;
}
1893 1894 1895

/* Return regno of the register REG and handle subregs too.  */
unsigned int
1896
reg_or_subregno (const_rtx reg)
1897 1898
{
  if (GET_CODE (reg) == SUBREG)
1899 1900 1901
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
1902
}