/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */

#include "config.h"
38
#include "system.h"
39 40
#include "coretypes.h"
#include "tm.h"
Richard Kenner committed
41
#include "rtl.h"
42
#include "tm_p.h"
Richard Kenner committed
43 44 45 46
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
47
#include "insn-attr.h"
48
#include "recog.h"
49
#include "function.h"
50
#include "basic-block.h"
51
#include "expr.h"
Mike Stump committed
52
#include "except.h"
Joseph Myers committed
53
#include "diagnostic-core.h"
Graham Stott committed
54
#include "toplev.h"
55
#include "reload.h"
Jan Hubicka committed
56
#include "predict.h"
57
#include "timevar.h"
58
#include "tree-pass.h"
59
#include "target.h"
Richard Kenner committed
60 61 62 63 64 65 66 67

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

68 69
static void init_label_info (rtx);
static void mark_all_labels (rtx);
70
static void mark_jump_label_1 (rtx, rtx, bool, bool);
71
static void mark_jump_label_asm (rtx, rtx);
72
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
73
static int invert_exp_1 (rtx, rtx);
74
static int returnjump_p_1 (rtx *, void *);
75

76 77 78 79
/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
80
void
81
rebuild_jump_labels (rtx f)
82
{
83
  rtx insn;
Richard Kenner committed
84

85
  timevar_push (TV_REBUILD_JUMP);
86
  init_label_info (f);
87
  mark_all_labels (f);
Richard Kenner committed
88

89 90 91
  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */
Richard Kenner committed
92 93

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
94
    if (LABEL_P (XEXP (insn, 0)))
95
      LABEL_NUSES (XEXP (insn, 0))++;
96
  timevar_pop (TV_REBUILD_JUMP);
Jan Hubicka committed
97 98
}

99 100 101 102 103 104 105 106
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.
 */
107
unsigned int
108
cleanup_barriers (void)
109 110 111 112 113
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
114
      if (BARRIER_P (insn))
115 116
	{
	  prev = prev_nonnote_insn (insn);
117 118
	  if (!prev)
	    continue;
119
	  if (BARRIER_P (prev))
120
	    delete_insn (insn);
121 122 123 124
	  else if (prev != PREV_INSN (insn))
	    reorder_insns (insn, insn, prev);
	}
    }
125
  return 0;
126
}
Richard Kenner committed
127

128
/* Pass descriptor for the barrier-cleanup pass above.  It has no gate
   (always runs) and executes cleanup_barriers; the only finish flag
   requests a dump of the function.  */
struct rtl_opt_pass pass_cleanup_barriers =
{
 {
  RTL_PASS,
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

147

148 149 150 151
/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Labels that must be preserved (LABEL_PRESERVE_P) start with a
	 use count of 1 so they are never considered dead.  */
      if (LABEL_P (insn))
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
	 sticky and not reset here; that way we won't lose association
	 with a label when e.g. the source for a target register
	 disappears out of reach for targets that may use jump-target
	 registers.  Jump transformations are supposed to transform
	 any REG_LABEL_TARGET notes.  The target label reference in a
	 branch may disappear from the branch (and from the
	 instruction before it) for other reasons, like register
	 allocation.  */

      if (INSN_P (insn))
	{
	  rtx note, next;

	  /* Walk the notes with an explicit NEXT so removing the
	     current note does not break the iteration.  */
	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
    }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;
  /* Most recent non-jump, non-deleted insn seen; reset at each label
     so the heuristic below never crosses a label boundary.  */
  rtx prev_nonjump_insn = NULL;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	mark_jump_label (PATTERN (insn), insn, 0);

	/* If the previous non-jump insn sets something to a label,
	   something that this jump insn uses, make that label the primary
	   target of this insn if we don't yet have any.  That previous
	   insn must be a single_set and not refer to more than one label.
	   The jump insn must not refer to other labels as jump targets
	   and must be a plain (set (pc) ...), maybe in a parallel, and
	   may refer to the item being set only directly or as one of the
	   arms in an IF_THEN_ELSE.  */
	if (! INSN_DELETED_P (insn)
	    && JUMP_P (insn)
	    && JUMP_LABEL (insn) == NULL)
	  {
	    rtx label_note = NULL;
	    rtx pc = pc_set (insn);
	    rtx pc_src = pc != NULL ? SET_SRC (pc) : NULL;

	    if (prev_nonjump_insn != NULL)
	      label_note
		= find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

	    if (label_note != NULL && pc_src != NULL)
	      {
		rtx label_set = single_set (prev_nonjump_insn);
		rtx label_dest
		  = label_set != NULL ? SET_DEST (label_set) : NULL;

		if (label_set != NULL
		    /* The source must be the direct LABEL_REF, not a
		       PLUS, UNSPEC, IF_THEN_ELSE etc.  */
		    && GET_CODE (SET_SRC (label_set)) == LABEL_REF
		    && (rtx_equal_p (label_dest, pc_src)
			|| (GET_CODE (pc_src) == IF_THEN_ELSE
			    && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
				|| rtx_equal_p (label_dest,
						XEXP (pc_src, 2))))))

		  {
		    /* The CODE_LABEL referred to in the note must be the
		       CODE_LABEL in the LABEL_REF of the "set".  We can
		       conveniently use it for the marker function, which
		       requires a LABEL_REF wrapping.  */
		    gcc_assert (XEXP (label_note, 0)
				== XEXP (SET_SRC (label_set), 0));

		    mark_jump_label_1 (label_set, insn, false, true);
		    gcc_assert (JUMP_LABEL (insn)
				== XEXP (SET_SRC (label_set), 0));
		  }
	      }
	  }
	else if (! INSN_DELETED_P (insn))
	  prev_nonjump_insn = insn;
      }
    else if (LABEL_P (insn))
      prev_nonjump_insn = NULL;

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      rtx insn;
      FOR_EACH_BB (bb)
	{
	  for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }

	  for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
	}
    }
}
Richard Kenner committed
282

283
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether it's source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const_rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  /* Prefer ARG0's mode; fall back to ARG1 (e.g. when ARG0 is a
     VOIDmode constant).  */
  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      const_rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will be never used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
	 returns its argument and we assign it to a const_rtx
	 variable.  */
      for (prev = prev_nonnote_insn (CONST_CAST_RTX(insn));
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (CONST_CAST_RTX(prev)))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for model
	         of i387 comparisons that first move flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If register is clobbered in some ununderstandable way,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}

Kazu Hirata committed
404
/* A wrapper around the previous function to take COMPARISON as rtx
405 406
   expression.  This simplifies many callers.  */
enum rtx_code
407
reversed_comparison_code (const_rtx comparison, const_rtx insn)
408
{
409
  if (!COMPARISON_P (comparison))
410 411 412 413 414
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}
415 416 417 418

/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
419
reversed_comparison (const_rtx exp, enum machine_mode mode)
420 421 422 423 424 425 426 427 428
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}

429

430 431 432 433 434
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
Kazu Hirata committed
435
   of the special treatment of non-signaling nans in comparisons.
436
   Use reversed_comparison_code instead.  */
Richard Kenner committed
437 438

enum rtx_code
439
reverse_condition (enum rtx_code code)
Richard Kenner committed
440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
463 464 465 466 467 468 469 470 471 472
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
473
    case LTGT:
474
      return UNKNOWN;
Richard Kenner committed
475 476

    default:
477
      gcc_unreachable ();
Richard Kenner committed
478 479 480
    }
}

481 482 483 484 485
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    /* An ordered comparison negates to the unordered-or-opposite
       form, so NaN operands make the result true.  */
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    /* Conversely, unordered-or forms negate to plain orderings.  */
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}

Richard Kenner committed
524 525 526 527
/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
528
swap_condition (enum rtx_code code)
Richard Kenner committed
529 530 531 532 533
{
  switch (code)
    {
    case EQ:
    case NE:
534 535 536
    case UNORDERED:
    case ORDERED:
    case UNEQ:
537
    case LTGT:
Richard Kenner committed
538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
556 557 558 559 560 561 562 563 564
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

Richard Kenner committed
565
    default:
566
      gcc_unreachable ();
Richard Kenner committed
567 568 569 570 571 572 573 574
    }
}

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
575
unsigned_condition (enum rtx_code code)
Richard Kenner committed
576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
597
      gcc_unreachable ();
Richard Kenner committed
598 599 600 601 602 603
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
604
signed_condition (enum rtx_code code)
Richard Kenner committed
605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
626
      gcc_unreachable ();
Richard Kenner committed
627 628 629
    }
}

630
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  /* Every condition trivially implies itself.  */
  if (code1 == code2)
    return 1;

  /* For each possible CODE1, list the strictly weaker conditions it
     implies.  Note that an ordered comparison being true implies its
     operands are not NaNs, hence the ORDERED entries.  */
  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}

/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
715
simplejump_p (const_rtx insn)
Richard Kenner committed
716
{
717
  return (JUMP_P (insn)
718 719 720
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
Richard Kenner committed
721 722 723
}

/* Return nonzero if INSN is a (possibly) conditional jump
Kazu Hirata committed
724 725
   and nothing more.

726
   Use of this function is deprecated, since we need to support combined
727
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
Richard Kenner committed
728 729

int
730
condjump_p (const_rtx insn)
Richard Kenner committed
731
{
732
  const_rtx x = PATTERN (insn);
Jeff Law committed
733 734 735

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
Richard Kenner committed
736
    return 0;
Jeff Law committed
737 738 739

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
740
    return 1;
Kazu Hirata committed
741 742 743 744 745 746 747 748
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || GET_CODE (XEXP (x, 1)) == RETURN))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| GET_CODE (XEXP (x, 2)) == RETURN))));
749 750
}

Jeff Law committed
751
/* Return nonzero if INSN is a (possibly) conditional jump inside a
752
   PARALLEL.
Kazu Hirata committed
753

754 755
   Use this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
756 757

int
758
condjump_in_parallel_p (const_rtx insn)
759
{
760
  const_rtx x = PATTERN (insn);
761 762 763 764 765 766 767 768 769 770 771 772 773 774 775

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
Richard Kenner committed
776 777 778 779 780 781 782 783 784 785
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}

786 787
/* Return set of PC, otherwise NULL.  */

788
rtx
789
pc_set (const_rtx insn)
790 791
{
  rtx pat;
792
  if (!JUMP_P (insn))
793
    return NULL_RTX;
794
  pat = PATTERN (insn);
795 796 797 798 799

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
800 801
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;
802 803

  return NULL_RTX;
804 805
}

806 807 808
/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

809
int
810
any_uncondjump_p (const_rtx insn)
811
{
812
  const_rtx x = pc_set (insn);
813 814 815 816
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
817 818
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
819 820 821
  return 1;
}

822
/* Return true when insn is a conditional jump.  This function works for
823 824
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
825
   onlyjump_p.
826

827 828
   Note that unlike condjump_p it returns false for unconditional jumps.  */

829
int
830
any_condjump_p (const_rtx insn)
831
{
832
  const_rtx x = pc_set (insn);
833 834
  enum rtx_code a, b;

835 836
  if (!x)
    return 0;
837 838
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
839

840 841
  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));
842

843
  return ((b == PC && (a == LABEL_REF || a == RETURN))
Kazu Hirata committed
844
	  || (a == PC && (b == LABEL_REF || b == RETURN)));
845 846
}

847 848 849
/* Return the label of a conditional jump.  */

rtx
850
condjump_label (const_rtx insn)
851
{
852
  rtx x = pc_set (insn);
853

854
  if (!x)
855 856 857 858 859 860 861 862 863 864 865 866 867
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

868 869 870
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
871
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
872 873
{
  rtx x = *loc;
874

875 876 877 878 879 880 881 882 883 884 885 886 887 888 889
  if (x == NULL)
    return false;

  switch (GET_CODE (x))
    {
    case RETURN:
    case EH_RETURN:
      return true;

    case SET:
      return SET_IS_RETURN_P (x);

    default:
      return false;
    }
890 891
}

892 893
/* Return TRUE if INSN is a return jump.  */

894
int
895
returnjump_p (rtx insn)
896
{
897
  if (!JUMP_P (insn))
898
    return 0;
899 900 901
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917
/* Helper for eh_returnjump_p: nonzero if *LOC is an EH_RETURN rtx.  */

static int
eh_returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x != NULL && GET_CODE (x) == EH_RETURN;
}

/* Return nonzero if INSN is a jump containing an EH_RETURN.  */
int
eh_returnjump_p (rtx insn)
{
  return (JUMP_P (insn)
	  && for_each_rtx (&PATTERN (insn), eh_returnjump_p_1, NULL));
}

918 919 920 921
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
922
onlyjump_p (const_rtx insn)
923 924 925
{
  rtx set;

926
  if (!JUMP_P (insn))
927 928 929 930 931 932 933 934 935 936 937 938 939
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

940 941
#ifdef HAVE_cc0

942
/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  /* An insn is judged by its pattern.  */
  if (INSN_P (x))
    x = PATTERN (x);

  /* sets_cc0_p == 1 means cc0 is set and nothing else is.  */
  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

Richard Kenner committed
957 958 959 960 961 962
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
963
sets_cc0_p (const_rtx x)
Richard Kenner committed
964
{
965 966 967 968 969 970
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

Richard Kenner committed
971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
990
#endif
Richard Kenner committed
991

992 993 994 995 996 997 998 999
/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
Richard Kenner committed
1000 1001 1002 1003 1004

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
1005
   that loop-optimization is done with.  */
Richard Kenner committed
1006

1007
void
1008
mark_jump_label (rtx x, rtx insn, int in_mem)
Richard Kenner committed
1009
{
1010 1011 1012 1013 1014 1015
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
1016 1017
}

1018
/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
	mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
			 XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL_RTX, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}

1153 1154 1155 1156 1157 1158 1159 1160 1161 1162 1163 1164 1165 1166 1167 1168
/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
mark_jump_label_asm (rtx asmop, rtx insn)
{
  int i;

  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);

  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
}
Richard Kenner committed
1169

1170
/* Delete insn INSN from the chain of insns and update label ref counts
1171
   and delete insns now unreachable.
1172

1173
   Returns the first insn after INSN that was not deleted.
Richard Kenner committed
1174

1175 1176
   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */
Richard Kenner committed
1177 1178

rtx
1179
delete_related_insns (rtx insn)
Richard Kenner committed
1180
{
1181
  int was_code_label = (LABEL_P (insn));
1182
  rtx note;
1183
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
Richard Kenner committed
1184 1185 1186 1187 1188 1189 1190 1191

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

1192
  delete_insn (insn);
Richard Kenner committed
1193 1194 1195 1196

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

1197
  if (next != 0 && BARRIER_P (next))
1198
    delete_insn (next);
Richard Kenner committed
1199 1200 1201 1202

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

1203
  if (JUMP_P (insn) && JUMP_LABEL (insn))
1204 1205 1206
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

1207
      if (LABEL_NUSES (lab) == 0)
1208 1209 1210 1211
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
1212
      else if (tablejump_p (insn, NULL, &lab_next))
1213 1214
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
1215
	     We may not be able to kill the label immediately preceding
1216 1217
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
1218
	  delete_related_insns (lab_next);
1219 1220
	}
    }
Richard Kenner committed
1221

1222 1223
  /* Likewise if we're deleting a dispatch table.  */

Shujing Zhao committed
1224
  if (JUMP_TABLE_DATA_P (insn))
1225 1226 1227 1228 1229 1230
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
1231 1232
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1233 1234 1235 1236 1237
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

1238 1239 1240
  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
1241
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1242 1243
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
1244
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
1245
	  && LABEL_P (XEXP (note, 0)))
1246 1247
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));
1248

1249
  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
Richard Kenner committed
1250 1251 1252 1253 1254 1255
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

1256
  if (was_code_label
Richard Kenner committed
1257
      && NEXT_INSN (insn) != 0
Shujing Zhao committed
1258
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
1259
    next = delete_related_insns (NEXT_INSN (insn));
Richard Kenner committed
1260 1261 1262

  /* If INSN was a label, delete insns following it if now unreachable.  */

1263
  if (was_code_label && prev && BARRIER_P (prev))
Richard Kenner committed
1264
    {
1265 1266
      enum rtx_code code;
      while (next)
Richard Kenner committed
1267
	{
1268
	  code = GET_CODE (next);
1269
	  if (code == NOTE)
Richard Kenner committed
1270
	    next = NEXT_INSN (next);
1271 1272 1273
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
1274
	  else if (code == BARRIER || INSN_P (next))
Richard Kenner committed
1275 1276 1277 1278
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
1279
	    next = delete_related_insns (next);
1280 1281
	  else
	    break;
Richard Kenner committed
1282 1283 1284
	}
    }

1285 1286 1287 1288 1289 1290
  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);
Richard Kenner committed
1291 1292 1293 1294 1295 1296 1297 1298 1299
  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
1300
delete_for_peephole (rtx from, rtx to)
Richard Kenner committed
1301
{
1302
  rtx insn = from;
Richard Kenner committed
1303 1304 1305

  while (1)
    {
1306 1307
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);
Richard Kenner committed
1308

1309
      if (!NOTE_P (insn))
Richard Kenner committed
1310 1311 1312 1313 1314 1315 1316 1317 1318 1319 1320 1321 1322 1323 1324 1325 1326 1327 1328 1329 1330 1331 1332 1333
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

1334 1335
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */
Richard Kenner committed
1336

1337
static void
1338
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
Richard Kenner committed
1339
{
1340 1341 1342 1343
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;
Richard Kenner committed
1344

1345
  if (code == LABEL_REF)
Richard Kenner committed
1346
    {
1347 1348 1349 1350
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  if (nlabel)
1351
	    n = gen_rtx_LABEL_REF (Pmode, nlabel);
1352
	  else
Kazu Hirata committed
1353
	    n = gen_rtx_RETURN (VOIDmode);
Richard Kenner committed
1354

1355 1356 1357 1358 1359 1360
	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
1361
      if (nlabel)
1362
	x = gen_rtx_LABEL_REF (Pmode, nlabel);
1363 1364
      else
	x = gen_rtx_RETURN (VOIDmode);
1365 1366 1367 1368 1369
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }
Richard Kenner committed
1370

1371 1372 1373 1374 1375 1376
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
Richard Kenner committed
1377 1378
    }

1379 1380 1381 1382 1383 1384 1385 1386 1387
  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

Richard Kenner committed
1388 1389 1390 1391
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
1392
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1393
      else if (fmt[i] == 'E')
Richard Kenner committed
1394
	{
1395
	  int j;
Richard Kenner committed
1396
	  for (j = 0; j < XVECLEN (x, i); j++)
1397
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
Richard Kenner committed
1398 1399
	}
    }
1400
}
Richard Kenner committed
1401

1402 1403 1404 1405 1406
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
1407
redirect_jump_1 (rtx jump, rtx nlabel)
1408 1409
{
  int ochanges = num_validated_changes ();
1410
  rtx *loc, asmop;
Jan Hubicka committed
1411

1412 1413 1414 1415 1416 1417 1418 1419 1420
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      if (nlabel == NULL)
	return 0;
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
Jan Hubicka committed
1421 1422 1423 1424 1425
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1426 1427 1428 1429 1430 1431
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.
Richard Kenner committed
1432 1433 1434 1435

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

1436 1437
   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */
Richard Kenner committed
1438 1439

int
1440
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
Richard Kenner committed
1441
{
1442
  rtx olabel = JUMP_LABEL (jump);
Richard Kenner committed
1443 1444 1445 1446

  if (nlabel == olabel)
    return 1;

1447
  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
Richard Kenner committed
1448 1449
    return 0;

1450 1451 1452 1453 1454
  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to be used to signalize behavior on
     moving FUNCTION_END note.  Just sanity check that no user still worry
     about this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  if (invert)
    invert_br_probabilities (jump);
}

1494 1495 1496 1497
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
1539
invert_jump_1 (rtx jump, rtx nlabel)
1540
{
1541
  rtx x = pc_set (jump);
1542
  int ochanges;
1543
  int ok;
1544 1545

  ochanges = num_validated_changes ();
1546 1547
  if (x == NULL)
    return 0;
1548 1549
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);
H.J. Lu committed
1550

1551 1552 1553
  if (num_validated_changes () == ochanges)
    return 0;

1554 1555 1556
  /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1557 1558 1559 1560 1561 1562
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
1563
invert_jump (rtx jump, rtx nlabel, int delete_unused)
1564
{
1565
  rtx olabel = JUMP_LABEL (jump);
1566

1567
  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1568
    {
1569
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1570 1571
      return 1;
    }
1572
  cancel_changes (0);
1573 1574 1575
  return 0;
}

Richard Kenner committed
1576 1577

/* Like rtx_equal_p except that it considers two REGs as equal
1578 1579
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
1580
   reversed.  */
Richard Kenner committed
1581 1582

int
1583
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
Richard Kenner committed
1584
{
1585
  int i;
1586
  const enum rtx_code code = GET_CODE (x);
1587
  const char *fmt;
Kazu Hirata committed
1588

Richard Kenner committed
1589 1590
  if (x == y)
    return 1;
1591

1592 1593 1594
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
				  && REG_P (SUBREG_REG (y)))))
Richard Kenner committed
1595
    {
1596
      int reg_x = -1, reg_y = -1;
1597
      int byte_x = 0, byte_y = 0;
1598
      struct subreg_info info;
Richard Kenner committed
1599 1600 1601 1602 1603 1604 1605 1606 1607 1608 1609

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
1610
	  reg_x = REGNO (SUBREG_REG (x));
1611
	  byte_x = SUBREG_BYTE (x);
1612 1613 1614

	  if (reg_renumber[reg_x] >= 0)
	    {
1615 1616 1617 1618
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
1619
		return 0;
1620
	      reg_x = info.offset;
1621
	      byte_x = 0;
1622
	    }
Richard Kenner committed
1623 1624 1625
	}
      else
	{
1626 1627 1628
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
Richard Kenner committed
1629
	}
1630

Richard Kenner committed
1631 1632
      if (GET_CODE (y) == SUBREG)
	{
1633
	  reg_y = REGNO (SUBREG_REG (y));
1634
	  byte_y = SUBREG_BYTE (y);
1635 1636 1637

	  if (reg_renumber[reg_y] >= 0)
	    {
1638 1639 1640 1641
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
1642
		return 0;
1643
	      reg_y = info.offset;
1644
	      byte_y = 0;
1645
	    }
Richard Kenner committed
1646 1647 1648
	}
      else
	{
1649 1650 1651
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
Richard Kenner committed
1652
	}
1653

1654
      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
Richard Kenner committed
1655
    }
1656

Kazu Hirata committed
1657
  /* Now we have disposed of all the cases
Richard Kenner committed
1658 1659 1660
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;
1661

Richard Kenner committed
1662 1663 1664 1665 1666 1667 1668
  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
1669
    case CONST_DOUBLE:
1670
      return 0;
Richard Kenner committed
1671 1672

    case LABEL_REF:
Richard Stallman committed
1673 1674 1675
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);
1676

Richard Kenner committed
1677 1678 1679 1680 1681 1682 1683
      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);
1684

1685 1686 1687 1688
    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

1689 1690
    default:
      break;
Richard Kenner committed
1691 1692 1693 1694 1695 1696 1697
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

1698 1699 1700 1701
  /* MEMs refering to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

1702
  /* For commutative operations, the RTX match if the operand match in any
1703 1704
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
1705 1706 1707 1708
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
1709
  else if (NON_COMMUTATIVE_P (x))
1710 1711
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
1712
  else if (UNARY_P (x))
1713 1714
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

Richard Kenner committed
1715 1716 1717 1718 1719 1720
  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
1721
      int j;
Richard Kenner committed
1722 1723
      switch (fmt[i])
	{
1724 1725 1726 1727 1728
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

Richard Kenner committed
1729 1730 1731 1732 1733
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

1734 1735 1736 1737 1738
	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

Richard Kenner committed
1739 1740 1741 1742 1743 1744 1745 1746 1747 1748 1749 1750 1751
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
1752
	  /* Fall through.  */
Richard Kenner committed
1753 1754 1755 1756 1757 1758 1759 1760 1761 1762 1763 1764
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
1765
	  gcc_unreachable ();
Richard Kenner committed
1766 1767 1768 1769 1770 1771 1772 1773 1774 1775 1776
	}
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
1777
true_regnum (const_rtx x)
Richard Kenner committed
1778
{
1779
  if (REG_P (x))
Richard Kenner committed
1780 1781 1782 1783 1784 1785 1786 1787
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
1788
      if (base >= 0
1789 1790 1791 1792 1793 1794 1795 1796 1797 1798 1799
	  && base < FIRST_PSEUDO_REGISTER)
	{
	  struct subreg_info info;

	  subreg_get_info (REGNO (SUBREG_REG (x)),
			   GET_MODE (SUBREG_REG (x)),
			   SUBREG_BYTE (x), GET_MODE (x), &info);

	  if (info.representable_p)
	    return base + info.offset;
	}
Richard Kenner committed
1800 1801 1802
    }
  return -1;
}
1803 1804 1805

/* Return regno of the register REG and handle subregs too.  */
unsigned int
1806
reg_or_subregno (const_rtx reg)
1807 1808
{
  if (GET_CODE (reg) == SUBREG)
1809 1810 1811
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
1812
}