/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.  For return insns, it contains either a
   RETURN or a SIMPLE_RETURN rtx.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */

#include "config.h"
37
#include "system.h"
38 39
#include "coretypes.h"
#include "tm.h"
Richard Kenner committed
40
#include "rtl.h"
41
#include "tm_p.h"
Richard Kenner committed
42 43 44 45
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
46
#include "insn-attr.h"
47
#include "recog.h"
48 49 50 51 52
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
53
#include "function.h"
54 55 56 57
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
58
#include "basic-block.h"
59
#include "expr.h"
Mike Stump committed
60
#include "except.h"
Joseph Myers committed
61
#include "diagnostic-core.h"
62
#include "reload.h"
63
#include "tree-pass.h"
64
#include "target.h"
65
#include "rtl-iter.h"
Richard Kenner committed
66 67 68 69 70 71 72 73

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

74 75
static void init_label_info (rtx_insn *);
static void mark_all_labels (rtx_insn *);
Trevor Saunders committed
76 77
static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
static void mark_jump_label_asm (rtx, rtx_insn *);
78
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
79
static int invert_exp_1 (rtx, rtx);
80

81 82
/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.
   F is the first insn of the chain to process.  When COUNT_FORCED is
   true, labels on the forced_labels list also have their reference
   counts bumped, so they are never considered unused.  */
static void
rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
{
  rtx_insn_list *insn;

  timevar_push (TV_REBUILD_JUMP);
  /* Reset LABEL_NUSES and stale REG_LABEL_OPERAND notes, then recompute
     label references over the whole chain.  */
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    for (insn = forced_labels; insn; insn = insn->next ())
      if (LABEL_P (insn->insn ()))
	LABEL_NUSES (insn->insn ())++;
  timevar_pop (TV_REBUILD_JUMP);
}
101 102 103 104 105 106

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  F is the first insn of the chain.  Labels
   referenced from static data (forced_labels) are counted too.  */
void
rebuild_jump_labels (rtx_insn *f)
{
  rebuild_jump_labels_1 (f, true);
}

/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the
   main function chain.  */
void
rebuild_jump_labels_chain (rtx_insn *chain)
{
  rebuild_jump_labels_1 (chain, false);
}
Jan Hubicka committed
120

121 122 123 124 125 126 127 128
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.
 */
static unsigned int
cleanup_barriers (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (BARRIER_P (insn))
	{
	  /* Find the last real (non-note) insn before this barrier.  */
	  rtx_insn *prev = prev_nonnote_insn (insn);
	  if (!prev)
	    continue;

	  if (CALL_P (prev))
	    {
	      /* Make sure we do not split a call and its corresponding
		 CALL_ARG_LOCATION note.  */
	      rtx_insn *next = NEXT_INSN (prev);

	      if (NOTE_P (next)
		  && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
		prev = next;
	    }

	  if (BARRIER_P (prev))
	    /* Two barriers in a row: the second is redundant.  */
	    delete_insn (insn);
	  else if (prev != PREV_INSN (insn))
	    /* Notes intervene; move the barrier right after PREV.  */
	    reorder_insns_nobb (insn, insn, prev);
	}
    }
  return 0;
}
Richard Kenner committed
160

161 162 163
namespace {

/* Pass descriptor for the barrier-cleanup pass; no timevar and no
   properties, it is pure insn-chain housekeeping.  */
const pass_data pass_data_cleanup_barriers =
{
  RTL_PASS, /* type */
  "barriers", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_barriers : public rtl_opt_pass
{
public:
  pass_cleanup_barriers (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return cleanup_barriers (); }

}; // class pass_cleanup_barriers

} // anon namespace

/* Factory function used by the pass manager to instantiate the pass.  */
rtl_opt_pass *
make_pass_cleanup_barriers (gcc::context *ctxt)
{
  return new pass_cleanup_barriers (ctxt);
}

196

197 198 199 200
/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.  F is the
   first insn of the chain.  */

static void
init_label_info (rtx_insn *f)
{
  rtx_insn *insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Preserved labels start with one phantom use so they can never
	 be deleted as unused.  */
      if (LABEL_P (insn))
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
	 sticky and not reset here; that way we won't lose association
	 with a label when e.g. the source for a target register
	 disappears out of reach for targets that may use jump-target
	 registers.  Jump transformations are supposed to transform
	 any REG_LABEL_TARGET notes.  The target label reference in a
	 branch may disappear from the branch (and from the
	 instruction before it) for other reasons, like register
	 allocation.  */

      if (INSN_P (insn))
	{
	  rtx note, next;

	  /* Drop operand-label notes whose label no longer appears in
	     the pattern.  */
	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
    }
}

236 237 238 239
/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it.  JUMP_INSN is a jump with no
   JUMP_LABEL yet; PREV_NONJUMP_INSN is the nearest preceding non-jump
   insn, a candidate for having loaded the target label.  */

static void
maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
	  /* The source must be the direct LABEL_REF, not a
	     PLUS, UNSPEC, IF_THEN_ELSE etc.  */
	  && GET_CODE (SET_SRC (label_set)) == LABEL_REF
	  /* The jump must use the loaded value either as the whole PC
	     source or as one arm of a conditional branch.  */
	  && (rtx_equal_p (label_dest, pc_src)
	      || (GET_CODE (pc_src) == IF_THEN_ELSE
		  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
		      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
	{
	  /* The CODE_LABEL referred to in the note must be the
	     CODE_LABEL in the LABEL_REF of the "set".  We can
	     conveniently use it for the marker function, which
	     requires a LABEL_REF wrapping.  */
	  gcc_assert (XEXP (label_note, 0) == LABEL_REF_LABEL (SET_SRC (label_set)));

	  mark_jump_label_1 (label_set, jump_insn, false, true);

	  gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
	}
    }
}

284
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.
   F is the first insn of the chain; in cfglayout mode the basic-block
   headers and footers are walked instead of the plain chain.  */

static void
mark_all_labels (rtx_insn *f)
{
  rtx_insn *insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	{
	  /* In cfglayout mode, we don't bother with trivial next-insn
	     propagation of LABEL_REFs into JUMP_LABEL.  This will be
	     handled by other optimizers using better algorithms.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      gcc_assert (! insn->deleted ());
	      if (NONDEBUG_INSN_P (insn))
	        mark_jump_label (PATTERN (insn), insn, 0);
	    }

	  /* In cfglayout mode, there may be non-insns between the
	     basic blocks.  If those non-insns represent tablejump data,
	     they contain label references that we must record.  */
	  for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	  for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	}
    }
  else
    {
      /* Linear walk; remember the last non-jump insn so a label load
	 immediately before a jump can be propagated into it.  */
      rtx_insn *prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (insn->deleted ())
	    ;
	  else if (LABEL_P (insn))
	    /* A label breaks the "previous insn" chain.  */
	    prev_nonjump_insn = NULL;
	  else if (JUMP_TABLE_DATA_P (insn))
	    mark_jump_label (PATTERN (insn), insn, 0);
	  else if (NONDEBUG_INSN_P (insn))
	    {
	      mark_jump_label (PATTERN (insn), insn, 0);
	      if (JUMP_P (insn))
		{
		  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
		    maybe_propagate_label_ref (insn, prev_nonjump_insn);
		}
	      else
		prev_nonjump_insn = insn;
	    }
	}
    }
}
Richard Kenner committed
343

344
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether it's source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  INSN may be NULL,
   in which case no backward search for the real comparison mode is done.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const_rtx insn)
{
  machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#else
      return reverse_condition (code);
#endif
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      const_rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will be never used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
	 returns its argument and we assign it to a const_rtx
	 variable.  */
      for (prev = prev_nonnote_insn (CONST_CAST_RTX (insn));
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (CONST_CAST_RTX (prev)))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      /* Found the defining COMPARE; its operands reveal the
		 real comparison mode.  */
	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for model
	         of i387 comparisons that first move flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If register is clobbered in some ununderstandable way,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}

Kazu Hirata committed
466
/* A wrapper around the previous function to take COMPARISON as rtx
   expression.  This simplifies many callers.  Returns UNKNOWN when
   COMPARISON is not a comparison rtx at all.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const_rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}
477 478 479 480

/* Return comparison with reversed code of EXP, simplified into MODE.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (const_rtx exp, machine_mode mode)
{
  enum rtx_code rev = reversed_comparison_code (exp, NULL_RTX);

  /* No valid reversal exists (e.g. IEEE unordered cases).  */
  if (rev == UNKNOWN)
    return NULL_RTX;

  return simplify_gen_relational (rev, mode, VOIDmode,
				  XEXP (exp, 0), XEXP (exp, 1));
}

491

492 493 494 495 496
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling nans in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    /* The unordered relational codes cannot be negated without knowing
       whether NaNs are possible; refuse rather than guess.  */
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}

543 544 545 546 547
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  Every code has a reversal
   here, so UNKNOWN is never returned.  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}

Richard Kenner committed
586 587 588 589
/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point, since swapping operands does not
   change the ordered/unordered nature of the relation.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    /* Symmetric relations are unchanged by an operand swap.  */
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  Unordered codes are not handled and abort.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  Equality
   comparisons and already-signed codes are returned unchanged;
   unordered codes are not handled and abort.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}

692
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  Both codes are assumed
   to compare the same pair of operands.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  /* Any relation trivially implies itself.  */
  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}

/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
777
simplejump_p (const rtx_insn *insn)
Richard Kenner committed
778
{
779
  return (JUMP_P (insn)
780 781 782
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
Richard Kenner committed
783 784 785
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.
   Note this only looks at a bare SET pattern, never a PARALLEL.  */

int
condjump_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    /* Conditional form: one arm falls through (PC), the other is a
       label or a return.  */
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || ANY_RETURN_P (XEXP (x, 1))))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| ANY_RETURN_P (XEXP (x, 2))))));
}

Jeff Law committed
813
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.  Only the first element of the PARALLEL is examined.

   Use this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  /* One arm must fall through (PC); the other must be a label or a
     return.  */
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
    return 1;
  return 0;
}

848 849
/* Return set of PC, otherwise NULL.  */

850
rtx
851
pc_set (const rtx_insn *insn)
852 853
{
  rtx pat;
854
  if (!JUMP_P (insn))
855
    return NULL_RTX;
856
  pat = PATTERN (insn);
857 858 859 860 861

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
862 863
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;
864 865

  return NULL_RTX;
866 867
}

868 869 870
/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

871
int
872
any_uncondjump_p (const rtx_insn *insn)
873
{
874
  const_rtx x = pc_set (insn);
875 876 877 878
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
879 880
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
881 882 883
  return 1;
}

884
/* Return true when insn is a conditional jump.  This function works for
885 886
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
887
   onlyjump_p.
888

889 890
   Note that unlike condjump_p it returns false for unconditional jumps.  */

891
int
892
any_condjump_p (const rtx_insn *insn)
893
{
894
  const_rtx x = pc_set (insn);
895 896
  enum rtx_code a, b;

897 898
  if (!x)
    return 0;
899 900
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
901

902 903
  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));
904

905 906 907
  return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
	  || (a == PC
	      && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
908 909
}

910 911 912
/* Return the LABEL_REF a conditional (or simple) jump targets,
   or NULL_RTX if there is none.  */

rtx
condjump_label (const rtx_insn *insn)
{
  rtx set = pc_set (insn);
  if (set == NULL_RTX)
    return NULL_RTX;

  rtx src = SET_SRC (set);

  /* Simple jump: the source is the label itself.  */
  if (GET_CODE (src) == LABEL_REF)
    return src;
  if (GET_CODE (src) != IF_THEN_ELSE)
    return NULL_RTX;

  /* Conditional jump: the non-fallthrough arm must be a label.  */
  rtx then_arm = XEXP (src, 1);
  rtx else_arm = XEXP (src, 2);
  if (else_arm == pc_rtx && GET_CODE (then_arm) == LABEL_REF)
    return then_arm;
  if (then_arm == pc_rtx && GET_CODE (else_arm) == LABEL_REF)
    return else_arm;
  return NULL_RTX;
}

931 932
/* Return TRUE if INSN is a return jump: its pattern contains a RETURN,
   SIMPLE_RETURN or EH_RETURN rtx, or a SET marked SET_IS_RETURN_P,
   anywhere in its subexpressions.  */

int
returnjump_p (const rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	{
	  const_rtx x = *iter;
	  switch (GET_CODE (x))
	    {
	    case RETURN:
	    case SIMPLE_RETURN:
	    case EH_RETURN:
	      return true;

	    case SET:
	      if (SET_IS_RETURN_P (x))
		return true;
	      break;

	    default:
	      break;
	    }
	}
    }
  return false;
}

962 963 964
/* Return true if INSN is a jump whose pattern contains an EH_RETURN
   rtx anywhere within it (an exception-handler return).  */

int
eh_returnjump_p (rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	if (GET_CODE (*iter) == EH_RETURN)
	  return true;
    }
  return false;
}

977 978 979 980
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
981
onlyjump_p (const rtx_insn *insn)
982 983 984
{
  rtx set;

985
  if (!JUMP_P (insn))
986 987 988 989 990 991 992 993 994 995 996 997 998
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

999 1000 1001
/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   NULL or a return.  */
bool
1002
jump_to_label_p (const rtx_insn *insn)
1003 1004 1005 1006 1007
{
  return (JUMP_P (insn)
	  && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}

1008 1009
#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  X may be an insn, in which case its
   pattern is examined.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (x == NULL_RTX)
    return 0;

  const_rtx body = INSN_P (x) ? PATTERN (x) : x;

  /* sets_cc0_p returns 1 only when cc0 is the sole thing set.  */
  return sets_cc0_p (body) == 1 && !side_effects_p (body);
}

Richard Kenner committed
1025 1026 1027 1028 1029 1030
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  Return 0 if cc0 is not set at all.
   X may be an insn, in which case its pattern is examined.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      /* Scan all elements; only SETs count as "other things" —
	 CLOBBERs and USEs are ignored.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
Richard Kenner committed
1059

1060 1061 1062 1063 1064 1065 1066 1067
/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
1068
   For returnjumps, the JUMP_LABEL will also be set as appropriate.
Richard Kenner committed
1069 1070 1071 1072 1073

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
1074
   that loop-optimization is done with.  */
Richard Kenner committed
1075

1076
void
Trevor Saunders committed
1077
mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
Richard Kenner committed
1078
{
1079 1080 1081 1082 1083 1084
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
1085 1086
}

1087
/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.

   Recursively walks X, bumping LABEL_NUSES for every LABEL_REF found
   and recording labels either in JUMP_LABEL (INSN) or as reg notes.  */

static void
mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes can never contain a label reference.  */
    case PC:
    case CC0:
    case REG:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      /* A return rtx itself acts as the jump target; it must not clash
	 with a different target already recorded.  */
      if (is_target)
	{
	  gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
	  JUMP_LABEL (insn) = x;
	}
      return;

    case MEM:
      /* From here down we are inside an address.  */
      in_mem = true;
      break;

    case SEQUENCE:
      /* Process each insn of a delay-slot sequence on its own behalf.  */
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	for (i = 0; i < seq->len (); i++)
	  mark_jump_label (PATTERN (seq->insn (i)),
			   seq->insn (i), 0);
      }
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      /* The condition is an operand; only the two arms are targets.  */
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx label = LABEL_REF_LABEL (x);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	LABEL_REF_LABEL (x) = label;
	/* Only live insns contribute to the use count.  */
	if (! insn || ! insn->deleted ())
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! insn->deleted ())
	{
	  /* ADDR_DIFF_VEC keeps its labels in operand 1; the base label
	     in operand 0 is deliberately not counted here.  */
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  /* INSN is passed as NULL so the vector itself never captures
	     JUMP_LABEL or grows reg notes.  */
	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}

1232 1233 1234 1235 1236 1237
/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
Trevor Saunders committed
1238
mark_jump_label_asm (rtx asmop, rtx_insn *insn)
1239 1240 1241 1242 1243 1244 1245 1246 1247
{
  int i;

  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);

  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
}
Richard Kenner committed
1248

1249
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx_insn *
delete_related_insns (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);

  /* Skip over anything already deleted so NEXT is a live insn.  */
  while (next && next->deleted ())
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (insn->deleted ())
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If this is a call, then we have to remove the var tracking note
     for the call arguments.  */

  if (CALL_P (insn)
      || (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
    {
      rtx_insn *p;

      /* NEXT may have just been deleted (the barrier case above); start
	 past it in that case and scan the following notes.  */
      for (p = next && next->deleted () ? NEXT_INSN (next) : next;
	   p && NOTE_P (p);
	   p = NEXT_INSN (p))
	if (NOTE_KIND (p) == NOTE_INSN_CALL_ARG_LOCATION)
	  {
	    remove_insn (p);
	    break;
	  }
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn);
      rtx_jump_table_data *lab_next;

      /* delete_insn above already decremented LABEL_NUSES for LAB.  */
      if (LABEL_NUSES (lab) == 0)
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec labels = table->get_labels ();
      int i;
      int len = GET_NUM_ELEM (labels);

      /* Delete every table entry label whose use count has dropped
	 to zero; each call may cascade further deletions.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
	  delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
      while (next && next->deleted ())
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Find the nearest live non-note insn before INSN.  */
  while (prev && (prev->deleted () || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && next->deleted ())
	    next = NEXT_INSN (next);
	  /* Keep the (use (insn))s created by dbr_schedule, which needs
	     them in order to track liveness relative to a previous
	     barrier.  */
	  else if (INSN_P (next)
		   && GET_CODE (PATTERN (next)) == USE
		   && INSN_P (XEXP (PATTERN (next), 0)))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && next->deleted ())
    next = NEXT_INSN (next);
  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
1408
delete_for_peephole (rtx_insn *from, rtx_insn *to)
Richard Kenner committed
1409
{
1410
  rtx_insn *insn = from;
Richard Kenner committed
1411 1412 1413

  while (1)
    {
1414 1415
      rtx_insn *next = NEXT_INSN (insn);
      rtx_insn *prev = PREV_INSN (insn);
Richard Kenner committed
1416

1417
      if (!NOTE_P (insn))
Richard Kenner committed
1418
	{
1419
	  insn->set_deleted();
Richard Kenner committed
1420 1421 1422 1423 1424

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
1425
	    SET_NEXT_INSN (prev) = next;
Richard Kenner committed
1426 1427

	  if (next)
1428
	    SET_PREV_INSN (next) = prev;
Richard Kenner committed
1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

1442 1443 1444 1445 1446 1447 1448 1449 1450 1451 1452 1453
/* A helper function for redirect_exp_1; examines its input X and returns
   either a LABEL_REF around a label, or a RETURN if X was NULL.  */
static rtx
redirect_target (rtx x)
{
  if (x == NULL_RTX)
    return ret_rtx;
  /* Return rtxes pass through unchanged; labels get wrapped.  */
  return ANY_RETURN_P (x) ? x : gen_rtx_LABEL_REF (Pmode, x);
}

1454 1455
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group;
   nothing is committed here — the caller must apply_change_group or
   cancel_changes.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  /* Direct hit: a LABEL_REF to OLABEL, or OLABEL itself (the return
     rtx case).  */
  if ((code == LABEL_REF && LABEL_REF_LABEL (x) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      /* A bare LABEL_REF cannot stand as a whole jump pattern; wrap it
	 in a (set (pc) ...) when replacing the full PATTERN.  */
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* Turning (set (pc) (label_ref OLABEL)) into a return: replace the
     whole SET with the return rtx.  */
  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && LABEL_REF_LABEL (SET_SRC (x)) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  /* Generic recursion over all sub-rtxes.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
Richard Kenner committed
1506

1507 1508 1509 1510 1511
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc, asmop;

  gcc_assert (nlabel != NULL_RTX);
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      /* NOTE(review): this check is unreachable — the gcc_assert above
	 already guarantees nlabel is non-null.  Kept for safety.  */
      if (nlabel == NULL)
	return 0;
      /* Only single-label asm gotos can be redirected this way.  */
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  /* Success iff redirect_exp_1 queued at least one change.  */
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   in that case we are to turn the jump into a (possibly conditional)
   return insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur when trying to produce return insns).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (!nlabel)
    {
      /* If there is no label, we are asked to redirect to the EXIT block.
	 When before the epilogue is emitted, return/simple_return cannot be
	 created so we return 0 immediately.  After the epilogue is emitted,
	 we always expect a label, either a non-null label, or a
	 return/simple_return RTX.  */

      if (!epilogue_completed)
	return 0;
      gcc_unreachable ();
    }

  /* Already pointing at NLABEL — nothing to do.  */
  if (nlabel == olabel)
    return 1;

  /* Queue the rewrite, then commit it; either step may refuse.  */
  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  /* Fix up JUMP_LABEL, use counts and REG_EQUAL notes (0 = no invert).  */
  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.  INVERT nonzero means the jump condition was
   inverted as part of the redirection.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to be used to signalize behavior on
     moving FUNCTION_END note.  Just sanity check that no user still worry
     about this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  /* Return rtxes are not labels and carry no use count.  */
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      /* Drop the note if the target became a return, or if the condition
	 in the note cannot be inverted to match the new jump.  */
      if (ANY_RETURN_P (nlabel)
	  || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* Handle the case where we had a conditional crossing jump to a return
     label and are now changing it into a direct conditional return.
     The jump is no longer crossing in that case.  */
  if (ANY_RETURN_P (nlabel))
    CROSSING_JUMP_P (jump) = 0;

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  /* Swap the THEN/ELSE branch probabilities to match the inversion.  */
  if (invert)
    invert_br_probabilities (jump);
}

1622 1623 1624 1625
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  /* Queue replacement of the comparison with its reversal.  */
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      /* Fallback: swap the two arms of the IF_THEN_ELSE.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }

  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx_insn *jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  /* Not a recognizable (set (pc) ...) jump — give up.  */
  if (x == NULL)
    return 0;
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
1691
invert_jump (rtx_insn *jump, rtx nlabel, int delete_unused)
1692
{
1693
  rtx olabel = JUMP_LABEL (jump);
1694

1695
  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1696
    {
1697
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1698 1699
      return 1;
    }
1700
  cancel_changes (0);
1701 1702 1703
  return 0;
}

Richard Kenner committed
1704 1705

/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Register (or subreg-of-register) against register comparison:
     resolve both sides through reg_renumber before comparing.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
				  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      /* Fold the subreg byte offset into the renumbered hard
		 register number; fail if the subreg is not
		 representable on the assigned hard register.  */
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
		return 0;
	      reg_x = info.offset;
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
		return 0;
	      reg_y = info.offset;
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      /* These are shared/unique objects; pointer equality (handled
	 above) is the only equality.  */
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (LABEL_REF_LABEL (x))
	      == next_real_insn (LABEL_REF_LABEL (y)));

    case SYMBOL_REF:
      /* Symbol names are interned, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
	      /* Operand 6 of ASM_OPERANDS / operand 1 of ASM_INPUT is
		 only the source-location, which may differ harmlessly.  */
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1)))
		break;
	      return 0;
	    }
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      /* A renumbered pseudo resolves to its assigned hard register.
	 During LRA, reg_renumber is consulted unconditionally.  */
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
	  && base < FIRST_PSEUDO_REGISTER)
	{
	  struct subreg_info info;

	  /* Translate the subreg byte offset into a hard register
	     offset; during LRA use the already-resolved BASE.  */
	  subreg_get_info (lra_in_progress
			   ? (unsigned) base : REGNO (SUBREG_REG (x)),
			   GET_MODE (SUBREG_REG (x)),
			   SUBREG_BYTE (x), GET_MODE (x), &info);

	  if (info.representable_p)
	    return base + info.offset;
	}
    }
  return -1;
}
1937 1938 1939

/* Return regno of the register REG and handle subregs too.  */
unsigned int
1940
reg_or_subregno (const_rtx reg)
1941 1942
{
  if (GET_CODE (reg) == SUBREG)
1943 1944 1945
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
1946
}