/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is all that remains of the once-famous jump-optimization pass
   of the compiler.  Now it contains basically a set of utility
   functions for operating on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.  For return insns, it contains either a
   RETURN or a SIMPLE_RETURN rtx.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "basic-block.h"
#include "expr.h"
#include "except.h"
#include "diagnostic-core.h"
#include "reload.h"
#include "predict.h"
#include "timevar.h"
#include "tree-pass.h"
#include "target.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void mark_jump_label_1 (rtx, rtx, bool, bool);
static void mark_jump_label_asm (rtx, rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);

/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.  */
static void
rebuild_jump_labels_1 (rtx f, bool count_forced)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    for (insn = forced_labels; insn; insn = XEXP (insn, 1))
      if (LABEL_P (XEXP (insn, 0)))
	LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx f)
{
  rebuild_jump_labels_1 (f, true);
}

/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the 
   main function chain.  */
void
rebuild_jump_labels_chain (rtx chain)
{
  rebuild_jump_labels_1 (chain, false);
}
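
/* A minimal usage sketch (not part of the original file): a pass that
   has just built a detached insn sequence would refresh its label info
   like this.  SEQ and get_example_sequence are hypothetical.  */
#if 0
static void
refresh_labels_example (void)
{
  rtx seq = get_example_sequence ();	/* Hypothetical insn chain.  */
  /* Recompute JUMP_LABEL fields and LABEL_NUSES counts without
     touching forced_labels.  */
  rebuild_jump_labels_chain (seq);
}
#endif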

/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.
 */
unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
	{
	  prev = prev_nonnote_insn (insn);
	  if (!prev)
	    continue;
	  if (BARRIER_P (prev))
	    delete_insn (insn);
	  else if (prev != PREV_INSN (insn))
	    reorder_insns (insn, insn, prev);
	}
    }
  return 0;
}

struct rtl_opt_pass pass_cleanup_barriers =
{
 {
  RTL_PASS,
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

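/* Illustrative effect of this pass (annotation, not original text):

     (jump_insn 10 ...) (note 11 ...) (barrier 12)
   becomes
     (jump_insn 10 ...) (barrier 12) (note 11 ...)

   so the BARRIER is once again the NEXT_INSN of the jump, and when two
   barriers end up adjacent, the later duplicate is deleted.  */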

/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (LABEL_P (insn))
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
	 sticky and not reset here; that way we won't lose association
	 with a label when e.g. the source for a target register
	 disappears out of reach for targets that may use jump-target
	 registers.  Jump transformations are supposed to transform
	 any REG_LABEL_TARGET notes.  The target label reference in a
	 branch may disappear from the branch (and from the
	 instruction before it) for other reasons, like register
	 allocation.  */

      if (INSN_P (insn))
	{
	  rtx note, next;

	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
    }
}

/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it.  */

static void
maybe_propagate_label_ref (rtx jump_insn, rtx prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
	  /* The source must be the direct LABEL_REF, not a
	     PLUS, UNSPEC, IF_THEN_ELSE etc.  */
	  && GET_CODE (SET_SRC (label_set)) == LABEL_REF
	  && (rtx_equal_p (label_dest, pc_src)
	      || (GET_CODE (pc_src) == IF_THEN_ELSE
		  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
		      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
	{
	  /* The CODE_LABEL referred to in the note must be the
	     CODE_LABEL in the LABEL_REF of the "set".  We can
	     conveniently use it for the marker function, which
	     requires a LABEL_REF wrapping.  */
	  gcc_assert (XEXP (label_note, 0) == XEXP (SET_SRC (label_set), 0));

	  mark_jump_label_1 (label_set, jump_insn, false, true);

	  gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
	}
    }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB (bb)
	{
	  /* In cfglayout mode, we don't bother with trivial next-insn
	     propagation of LABEL_REFs into JUMP_LABEL.  This will be
	     handled by other optimizers using better algorithms.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      gcc_assert (! INSN_DELETED_P (insn));
	      if (NONDEBUG_INSN_P (insn))
	        mark_jump_label (PATTERN (insn), insn, 0);
	    }

	  /* In cfglayout mode, there may be non-insns between the
	     basic blocks.  If those non-insns represent tablejump data,
	     they contain label references that we must record.  */
	  for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
	  for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
	}
    }
  else
    {
      rtx prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (INSN_DELETED_P (insn))
	    ;
	  else if (LABEL_P (insn))
	    prev_nonjump_insn = NULL;
	  else if (NONDEBUG_INSN_P (insn))
	    {
	      mark_jump_label (PATTERN (insn), insn, 0);
	      if (JUMP_P (insn))
		{
		  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
		    maybe_propagate_label_ref (insn, prev_nonjump_insn);
		}
	      else
		prev_nonjump_insn = insn;
	    }
	}
    }
}

/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return the
   code of the reversed comparison if it is possible to do so.  Otherwise
   return UNKNOWN.  UNKNOWN may also be returned when we have a CC_MODE
   compare and don't know whether its source is a floating point or an
   integer comparison.  The machine description should define the
   REVERSIBLE_CC_MODE and REVERSE_CONDITION macros to help this function
   avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const_rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us with a way to
     reverse the comparison.  Give it priority over everything else to
     allow the machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#else
      return reverse_condition (code);
#endif
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      const_rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
	 returns its argument and we assign it to a const_rtx
	 variable.  */
      for (prev = prev_nonnote_insn (CONST_CAST_RTX(insn));
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (CONST_CAST_RTX(prev)))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for
	         models of i387 comparisons that first move flag registers
	         around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If the register is clobbered in some way we don't understand,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}

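/* A minimal usage sketch (assumption, not original code): callers
   typically probe for reversibility before committing to an inversion.
   COND and JUMP are hypothetical.  */
#if 0
static bool
reversal_is_safe_p (rtx cond, rtx jump)
{
  /* UNKNOWN means there is no safe reversal, e.g. a NaN-sensitive
     float comparison or an opaque CC mode.  */
  return reversed_comparison_code (cond, jump) != UNKNOWN;
}
#endif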
/* A wrapper around the previous function to take COMPARISON as an rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const_rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}

/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (const_rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}


/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling nans in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}

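/* Worked example (annotation, not original text): reverse_condition
   maps EQ <-> NE, GT -> LE, GEU -> LTU, and so on.  Thus

     (gt:SI (reg:SI 100) (const_int 0))

   negates to

     (le:SI (reg:SI 100) (const_int 0))

   which is only correct when the operands cannot be NaNs; the UN* and
   LTGT codes return UNKNOWN here and need
   reverse_condition_maybe_unordered below.  */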
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}

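/* Worked example (annotation, not original text): swapping operands is
   IEEE-safe while reversing is not.  (lt:SI a b) and (gt:SI b a) are
   the same test, so swap_condition (LT) == GT; likewise UNLT swaps to
   UNGT, and the symmetric codes EQ, NE, ORDERED, UNORDERED, UNEQ and
   LTGT swap to themselves.  */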
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}

/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}

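/* Worked example (annotation, not original text):
   comparison_dominates_p (EQ, LE) is 1, since x == y implies x <= y,
   while comparison_dominates_p (LE, EQ) is 0.  For floating point,
   LT dominates ORDERED: a true LT guarantees neither operand is a
   NaN.  */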
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (const_rtx insn)
{
  return (JUMP_P (insn)
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || ANY_RETURN_P (XEXP (x, 1))))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| ANY_RETURN_P (XEXP (x, 2))))));
}

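/* Illustrative RTL shapes (annotation, not original text):
   simplejump_p accepts exactly

     (set (pc) (label_ref L))

   and condjump_p additionally accepts the conditional form

     (set (pc) (if_then_else (lt (reg:CC 17) (const_int 0))
                             (label_ref L)
                             (pc)))

   but neither accepts such a set wrapped in a PARALLEL; for that, see
   condjump_in_parallel_p and any_condjump_p below.  */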
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
    return 1;
  return 0;
}

/* Return the SET of PC in INSN, otherwise NULL.  */

rtx
pc_set (const_rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}

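/* A minimal usage sketch (assumption, not original code): extracting
   the branch condition of JUMP regardless of whether the jump is
   bundled in a PARALLEL.  */
#if 0
static rtx
branch_condition_example (rtx jump)
{
  rtx set = pc_set (jump);
  if (set != NULL_RTX && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
    return XEXP (SET_SRC (set), 0);	/* The comparison rtx.  */
  return NULL_RTX;			/* Unconditional, or not a jump.  */
}
#endif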
/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
	  || (a == PC
	      && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
}

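/* Illustrative RTL shape (annotation, not original text): a jump that
   any_condjump_p accepts but plain condjump_p rejects, because the PC
   set is the first element of a PARALLEL:

     (parallel [(set (pc) (if_then_else (eq (reg:CC 17) (const_int 0))
                                        (label_ref L)
                                        (pc)))
                (clobber (reg:SI 0))])  */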
/* Return the label of a conditional jump.  */

rtx
condjump_label (const_rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  if (x == NULL)
    return false;

  switch (GET_CODE (x))
    {
    case RETURN:
    case SIMPLE_RETURN:
    case EH_RETURN:
      return true;

    case SET:
      return SET_IS_RETURN_P (x);

    default:
      return false;
    }
}

/* Return TRUE if INSN is a return jump.  */

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

/* Return true if *LOC is an EH_RETURN rtx; worker for eh_returnjump_p.  */

static int
eh_returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  return *loc && GET_CODE (*loc) == EH_RETURN;
}

int
eh_returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), eh_returnjump_p_1, NULL);
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const_rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   NULL or a return.  */
bool
jump_to_label_p (rtx insn)
{
  return (JUMP_P (insn)
	  && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}

#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif

/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
   For returnjumps, the JUMP_LABEL will also be set as appropriate.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
}

/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      if (is_target)
	{
	  gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
	  JUMP_LABEL (insn) = x;
	}
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
	mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
			 XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL_RTX, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}

/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
mark_jump_label_asm (rtx asmop, rtx insn)
{
  int i;

  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);

  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
}

/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead, and a
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);
  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

/* A helper function for redirect_exp_1; examines its input X and returns
   either a LABEL_REF around a label, or a RETURN if X was NULL.  */
static rtx
redirect_target (rtx x)
{
  if (x == NULL_RTX)
    return ret_rtx;
  if (!ANY_RETURN_P (x))
    return gen_rtx_LABEL_REF (Pmode, x);
  return x;
}

/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if ((code == LABEL_REF && XEXP (x, 0) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
 	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc, asmop;

  gcc_assert (nlabel != NULL_RTX);
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      if (nlabel == NULL)
	return 0;
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   in that case we are to turn the jump into a (possibly conditional)
   return insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur when trying to produce return insns).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (!nlabel)
    {
      /* If there is no label, we are asked to redirect to the EXIT block.
	 When before the epilogue is emitted, return/simple_return cannot be
	 created so we return 0 immediately.  After the epilogue is emitted,
	 we always expect a label, either a non-null label, or a
	 return/simple_return RTX.  */

      if (!epilogue_completed)
	return 0;
      gcc_unreachable ();
    }

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}

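/* A minimal usage sketch (assumption, not original code): retarget JUMP
   at NEW_LABEL and let the old target die if it becomes unused.  The
   zero return only happens when asked to produce a return insn before
   the epilogue exists.  */
#if 0
static bool
retarget_branch_example (rtx jump, rtx new_label)
{
  /* On success the JUMP_LABEL, the LABEL_NUSES counts and any REG_EQUAL
     note are all updated, and the old label may be deleted.  */
  return redirect_jump (jump, new_label, /*delete_unused=*/1) != 0;
}
#endif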
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to be used to signal behavior on
     moving the FUNCTION_END note.  Just sanity check that no caller
     still relies on this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (ANY_RETURN_P (nlabel)
	  || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  if (x == NULL)
    return 0;
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}

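/* A minimal usage sketch (assumption, not original code): flip a
   conditional branch so it jumps to NEW_LABEL when the original
   condition is false.  The change group makes this transactional:
   on failure nothing is modified.  */
#if 0
static bool
flip_branch_example (rtx jump, rtx new_label)
{
  return invert_jump (jump, new_label, /*delete_unused=*/0) != 0;
}
#endif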

/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
				  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
		return 0;
	      reg_x = info.offset;
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
		return 0;
	      reg_y = info.offset;
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1))
		  && locator_eq (XINT (x, i), XINT (y, i)))
		break;
	      return 0;
	    }
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
	  && base < FIRST_PSEUDO_REGISTER)
	{
	  struct subreg_info info;

	  subreg_get_info (REGNO (SUBREG_REG (x)),
			   GET_MODE (SUBREG_REG (x)),
			   SUBREG_BYTE (x), GET_MODE (x), &info);

	  if (info.representable_p)
	    return base + info.offset;
	}
    }
  return -1;
}

/* Return regno of the register REG and handle subregs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}