/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges

   Functions not intended for generic use:
     - Infrastructure to quickly determine the basic block for an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of the insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
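
/* A small usage sketch for the API above (illustrative only, not code
   compiled as part of this file): after a pass simplifies or deletes a
   control flow insn in block BB, it keeps the CFG in sync with

     purge_dead_edges (bb);

   or, after a transformation that may have touched many blocks,

     purge_all_dead_edges ();  */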

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-attr.h"
#include "insn-config.h"
#include "cfglayout.h"
#include "expr.h"
#include "target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"
#include "df.h"

static int can_delete_note_p (const_rtx);
static int can_delete_label_p (const_rtx);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (basic_block, FILE *, int, int);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const_rtx note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const_rtx label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !in_expr_list_p (forced_labels, label));
}

/* Delete INSN by patching it out.  Return the next insn.  */

rtx
delete_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, the constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }

  return next;
}

/* Like delete_insn but also purge dead edges from BB.  */

rtx
delete_insn_and_edges (rtx insn)
{
  rtx x;
  bool purge = false;

  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  x = delete_insn (insn);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
  return x;
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set the bb field to
   NULL for insns that cannot be removed.  */

void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
  rtx next;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  while (1)
    {
      next = NEXT_INSN (start);
      if (NOTE_P (start) && !can_delete_note_p (start))
	;
      else
	next = delete_insn (start);

      if (clear_bb && !INSN_DELETED_P (start))
	set_block_for_insn (start, NULL);

      if (start == finish)
	break;
      start = next;
    }
}
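
/* Usage sketch for delete_insn_chain (illustrative only): given insns
   FIRST and LAST on the same chain, with FIRST preceding LAST, the whole
   inclusive range is removed with

     delete_insn_chain (first, last, false);

   A caller that is about to discard the containing basic block passes
   true instead, so that insns surviving as notes get a NULL bb field.  */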

/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - it
   reuses the note and basic block struct in BB_NOTE, if any, and does not grow
   the BASIC_BLOCK chain; it should be used directly only by CFG construction
   code.  END can be NULL to create a new empty basic block before HEAD.  Both
   END and HEAD can be NULL to create a basic block at the end of the INSN
   chain.  AFTER is the basic block the new block should be placed after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK (bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
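
/* Illustrative call patterns for create_basic_block_structure (a sketch,
   not code from this file):

     create_basic_block_structure (head, end, NULL, after);
       - new block covering HEAD..END inclusive
     create_basic_block_structure (head, NULL, NULL, after);
       - new empty block placed before HEAD
     create_basic_block_structure (NULL, NULL, NULL, after);
       - new block at the end of the insn chain  */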

/* Create a new basic block consisting of the instructions between HEAD and END
   and place it in the BB chain after block AFTER.  END can be NULL to create
   a new empty basic block before HEAD.  Both END and HEAD can be NULL to
   create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx head = (rtx) headp, end = (rtx) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  n_basic_blocks++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);

  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end = BB_END (bb);
      rtx insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}

struct rtl_opt_pass pass_free_cfg =
{
 {
  RTL_PASS,
  "*free_cfg",                          /* name */
  NULL,                                 /* gate */
  rest_of_pass_free_cfg,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfg,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};

/* Return the RTX after which to emit code at the entry of the function.  */
rtx
entry_of_function (void)
{
  return (n_basic_blocks > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
{
  rtx insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx
first_insn_after_basic_block_note (basic_block block)
{
  rtx insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL_RTX;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Creates a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  rtx b_debug_start = b_end, b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);

  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (del_first, del_last, true);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      a_end = b_debug_end;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      a_end = b_debug_end;
    }

  df_bb_delete (b->index);
  BB_END (a) = a_end;
}
/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return BB_HEAD (block);
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  rtx insn = src->il.rtl->footer;

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  if (PREV_INSN (insn))
		    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    src->il.rtl->footer = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR);
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);

	      NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      NEXT_INSN (new_insn) = barrier;
	      NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      PREV_INSN (new_insn) = PREV_INSN (barrier);
	      PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  */

static bool
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
{
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR)
	return false;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR)
	return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR);
	      return false;
	    }
	}
    }
  return true;
}


/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (insn, old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    for (insn = BB_HEAD (src); insn != NEXT_INSN (BB_END (src));
	 insn = NEXT_INSN (insn))
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at the
   expense of adding new instructions or reordering basic blocks.

   This function can also be called with an edge destination equivalent to
   TARGET.  Then it should try the simplifications and do nothing if none is
   possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.  We also return NULL when E already pointed to
   TARGET and we didn't manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  return ret;
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  */

static basic_block
force_nonfallthru_and_redirect (edge e, basic_block target)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  int loc;

  /* In the case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = INTVAL (XEXP (note, 0));

	  b->probability = prob;
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
      e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR)
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  VEC_safe_push (edge, gc, bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
    {
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
	note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = e->count;
      jump_block->frequency = EDGE_FREQUENCY (e);
      jump_block->loop_depth = target->loop_depth;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);
      if (flag_reorder_blocks_and_partition
	  && targetm.have_named_sections
	  && JUMP_P (BB_END (jump_block))
	  && !any_condjump_p (BB_END (jump_block))
	  && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
	add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = e->probability;
      new_edge->count = e->count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  if (e->goto_locus && e->goto_block == NULL)
    loc = e->goto_locus;
  else
    loc = 0;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
#ifdef HAVE_return
	emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	gcc_unreachable ();
#endif
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (BB_END (jump_block));
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  return new_bb;
}

/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

basic_block
force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest);
}
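
/* E.g. (a sketch, assuming E is a valid edge): before placing new code
   between E->src and its fallthrough successor, a pass can do

     if (e->flags & EDGE_FALLTHRU)
       force_nonfallthru (e);

   which emits the needed jump and possibly creates a new basic block.  */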

/* Redirect edge even at the expense of creating new jump insn or
   basic block.  Return new basic block if created, NULL otherwise.
   Conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}

/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}

/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR)
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might
     not be adjacent, and the right place is after the source.  */
  if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
      /* ??? Why not edge_in->dest->prev_bb here?  */
      BB_COPY_PARTITION (bb, edge_in->dest);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR)
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (rtx pattern, edge e)
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));

  if (e->insns.r == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns.r);

  emit_insn (pattern);

  e->insns.r = get_insns ();
  end_sequence ();
}
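
/* For example (a sketch; DEST and SRC stand for any valid rtx the caller
   owns), a pass can queue a fixup copy on a critical edge and let the
   CFG machinery place it:

     insert_insn_on_edge (gen_move_insn (dest, src), e);
     ...
     commit_edge_insertions ();

   Until commit_edge_insertions runs, the pending sequence lives on
   e->insns.r and the insn stream is unchanged.  */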

/* Update the CFG for the instructions queued on edge E.  */

void
commit_one_edge_insertion (edge e)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
	 one predecessor, insert there.  Except for the exit block.  */
      if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
	{
	  bb = e->dest;

	  /* Get the location correct wrt a code label, and "nice" wrt
	     a basic block note, and before everything else.  */
	  tmp = BB_HEAD (bb);
	  if (LABEL_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (tmp == BB_HEAD (bb))
	    before = tmp;
	  else if (tmp)
	    after = PREV_INSN (tmp);
	  else
	    after = get_last_insn ();
	}

      /* If the source has one successor and the edge is not abnormal,
	 insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
	       && single_succ_p (e->src)
	       && e->src != ENTRY_BLOCK_PTR)
	{
	  bb = e->src;

	  /* It is possible to have a non-simple jump here.  Consider a target
	     where some forms of unconditional jumps clobber a register.  This
	     happens on the fr30 for example.

	     We know this block has a single successor, so we can just emit
	     the queued insns before the jump.  */
	  if (JUMP_P (BB_END (bb)))
	    before = BB_END (bb);
	  else
	    {
	      /* We'd better be fallthru, or we've lost track of
		 what's what.  */
	      gcc_assert (e->flags & EDGE_FALLTHRU);

	      after = BB_END (bb);
	    }
	}
      /* Otherwise we must split the edge.  */
      else
	{
	  bb = split_edge (e);
	  after = BB_END (bb);

	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && e->src != ENTRY_BLOCK_PTR
	      && BB_PARTITION (e->src) == BB_COLD_PARTITION
	      && !(e->flags & EDGE_CROSSING)
	      && JUMP_P (after)
	      && !any_condjump_p (after)
	      && (single_succ_edge (bb)->flags & EDGE_CROSSING))
	    add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));

  /* Mark the basic block for find_many_sub_basic_blocks.  */
  if (current_ir_type () != IR_RTL_CFGLAYOUT)
    bb->aux = &bb->aux;
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  {
	    changed = true;
	    commit_one_edge_insertion (e);
	  }
    }

  if (!changed)
    return;

  /* In the old rtl CFG API, it was OK to insert control flow on an
     edge, apparently?  In cfglayout mode, this will *not* work, and
     the caller is responsible for making sure that control flow is
     valid at all times.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
	SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	gcc_assert (bb->aux == &bb->aux);
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}

/* Print out RTL-specific basic block information (live information
   at start and end).  */

static void
rtl_dump_bb (basic_block bb, FILE *outf, int indent, int flags ATTRIBUTE_UNUSED)
{
  rtx insn;
  rtx last;
  char *s_indent;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  if (df)
    {
      df_dump_top (bb, outf);
      putc ('\n', outf);
    }

  for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
       insn = NEXT_INSN (insn))
    print_rtl_single (outf, insn);

  if (df)
    {
      df_dump_bottom (bb, outf);
      putc ('\n', outf);
    }
}

/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
{
  const_rtx tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);

      basic_block bb;

      if (df)
	df_dump_start (outf);

      FOR_EACH_BB_REVERSE (bb)
	{
	  rtx x;

	  start[INSN_UID (BB_HEAD (bb))] = bb;
	  end[INSN_UID (BB_END (bb))] = bb;
	  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == BB_END (bb))
		break;
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;
	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      edge e;
	      edge_iterator ei;

	      fprintf (outf, ";; Start of basic block (");
	      FOR_EACH_EDGE (e, ei, bb->preds)
		fprintf (outf, " %d", e->src->index);
	      fprintf (outf, ") -> %d\n", bb->index);

	      if (df)
		{
		  df_dump_top (bb, outf);
		  putc ('\n', outf);
		}
	      FOR_EACH_EDGE (e, ei, bb->preds)
		{
		  fputs (";; Pred edge ", outf);
		  dump_edge_info (outf, e, 0);
		  fputc ('\n', outf);
		}
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && !NOTE_P (tmp_rtx)
	      && !BARRIER_P (tmp_rtx))
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      edge e;
	      edge_iterator ei;

	      fprintf (outf, ";; End of basic block %d -> (", bb->index);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		fprintf (outf, " %d", e->dest->index);
	      fprintf (outf, ")\n");

	      if (df)
		{
		  df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	      putc ('\n', outf);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  fputs (";; Succ edge ", outf);
		  dump_edge_info (outf, e, 1);
		  fputc ('\n', outf);
		}
	    }
	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (crtl->epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}

/* Update the REG_BR_PROB note on the jump ending BB so that it agrees
   with BRANCH_EDGE (bb)->probability.  */
void
update_br_prob_note (basic_block bb)
{
  rtx note;
  if (!JUMP_P (BB_END (bb)))
    return;
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
    return;
  XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
}
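
/* Usage sketch (illustrative, not a call made from this file): a pass
   that has rescaled edge probabilities can resynchronize the notes
   afterwards with

       basic_block bb;
       FOR_EACH_BB (bb)
	 update_br_prob_note (bb);

   using the usual block iterator from basic-block.h.  */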

/* Get the last insn associated with block BB (that includes barriers and
   tablejumps after BB).  */
rtx
get_last_bb_insn (basic_block bb)
{
  rtx tmp;
  rtx end = BB_END (bb);

  /* Include any jump table following the basic block.  */
  if (tablejump_p (end, NULL, &tmp))
    end = tmp;

  /* Include any barriers that may follow the basic block.  */
  tmp = next_nonnote_insn_bb (end);
  while (tmp && BARRIER_P (tmp))
    {
      end = tmp;
      tmp = next_nonnote_insn_bb (end);
    }

  return end;
}
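
/* Illustrative sketch: to walk a block together with any jump table
   and barriers trailing it, a caller can pair BB_HEAD with this
   function (assuming BB is still in the insn stream):

       rtx insn, stop = NEXT_INSN (get_last_bb_insn (bb));
       for (insn = BB_HEAD (bb); insn != stop; insn = NEXT_INSN (insn))
	 ;

   STOP may be NULL_RTX when the block ends the chain, which the loop
   above handles naturally.  */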

/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does the following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions

   In future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  rtx x;
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Now check the basic blocks (boundaries etc.)  */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e, fallthru = NULL;
      rtx note;
      edge_iterator ei;

      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
	      && profile_status != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    {
	      n_fallthru++, fallthru = e;
	      if ((e->flags & EDGE_CROSSING)
		  || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
		      && e->src != ENTRY_BLOCK_PTR
		      && e->dest != EXIT_BLOCK_PTR))
		{
		  error ("fallthru edge crosses section boundary (bb %i)",
			 e->src->index);
		  err = 1;
		}
	    }

	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_call++;

	  if (e->flags & EDGE_EH)
	    n_eh++;
	  else if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many eh edges %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump %i",
		 bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong number of branch edges after conditional jump %i",
		 bb->index);
	  err = 1;
	}
      if (n_call && !CALL_P (BB_END (bb)))
	{
	  error ("call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal
	  && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
	/* We may have a barrier inside a basic block before dead code
	   elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
	if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
	  {
	    debug_rtx (x);
	    if (! BLOCK_FOR_INSN (x))
	      error
		("insn %d inside basic block %d but block_for_insn is NULL",
		 INSN_UID (x), bb->index);
	    else
	      error
		("insn %d inside basic block %d but block_for_insn is %i",
		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

	    err = 1;
	  }

      /* OK, pointers are correct.  Now check the header of the basic
	 block.  It ought to contain an optional CODE_LABEL followed
	 by a NOTE_INSN_BASIC_BLOCK note.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
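
/* Illustrative note: this checker is not called directly; it runs as
   the verify_flow_info hook of cfglayout mode.  A pass normally just
   does

       #ifdef ENABLE_CHECKING
       verify_flow_info ();
       #endif

   and the wrapper in cfghooks.c dispatches to whichever checks the
   active CFG hooks provide.  */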

/* Verify the CFG and RTL consistency for the fully linearized RTL
   (as opposed to cfglayout RTL).

   Currently it does the following checks:
   - all checks of rtl_verify_flow_info_1
   - test head/end pointers
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edges point to the adjacent blocks.  */

static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
  const int max_uid = get_max_uid ();

  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      edge_iterator ei;
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      last_head = PREV_INSN (x);

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR
	       && e->dest != EXIT_BLOCK_PTR)
	{
	  rtx insn;

	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  return err;
}

/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted unconditional jumps.  Eliminate the corresponding edges
   from the CFG.  Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b, f;
      edge_iterator ei;

      /* We care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags leaking from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return false;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
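
/* Usage sketch (illustrative): after rewriting the insn at the end of
   BB, a pass can bring the outgoing edges back in sync with

       bool changed = purge_dead_edges (bb);

   and rerun its local analysis when CHANGED is true.  */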

/* Search all basic blocks for potentially dead edges and purge them.  Return
   true if some edge has been eliminated.  */

bool
purge_all_dead_edges (void)
{
  bool purged = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      bool purged_here = purge_dead_edges (bb);

      purged |= purged_here;
    }

  return purged;
}
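
/* Illustrative pairing (a plausible pattern, not a requirement):
   passes that simplify jumps across the whole function often follow
   up with a CFG cleanup, e.g.

       if (purge_all_dead_edges ())
	 cleanup_cfg (0);

   cleanup_cfg is the cfgcleanup.c entry point.  */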

/* Same as split_block but update cfg_layout structures.  */

static basic_block
cfg_layout_split_block (basic_block bb, void *insnp)
{
  rtx insn = (rtx) insnp;
  basic_block new_bb = rtl_split_block (bb, insn);

  new_bb->il.rtl->footer = bb->il.rtl->footer;
  bb->il.rtl->footer = NULL;

  return new_bb;
}

/* Redirect edge E to DEST.  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  e->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (e->src);
	  return e;
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      ret = redirect_edge_succ_nodup (e, dest);
      if (dump_file)
	fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
		 e->src->index, e->dest->index, dest->index);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}

/* Simple wrapper, since we can always redirect fallthru edges.  */
static basic_block
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);

  gcc_assert (redirected);
  return NULL;
}

/* Same as delete_basic_block but update cfg_layout structures.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remnants;

  if (bb->il.rtl->header)
    {
      next = BB_HEAD (bb);
      if (prev)
	NEXT_INSN (prev) = bb->il.rtl->header;
      else
	set_first_insn (bb->il.rtl->header);
      PREV_INSN (bb->il.rtl->header) = prev;
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (bb->il.rtl->footer)
    {
      insn = bb->il.rtl->footer;
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		bb->il.rtl->footer = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (bb->il.rtl->footer)
	{
	  insn = BB_END (bb);
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  NEXT_INSN (insn) = next;
	  if (next)
	    PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &bb->next_bb->il.rtl->header;
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      remnants = unlink_insn_chain (prev, next);
      insn = remnants;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
	PREV_INSN (*to) = insn;
      *to = remnants;
    }
}

/* Return true when blocks A and B can be safely merged.  */

static bool
cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
	  /* If the jump insn has side effects, we can't kill the edge.
	     When not optimizing, try_redirect_by_replacing_jump will
	     not allow us to redirect an edge by replacing a table jump.  */
	  && (!JUMP_P (BB_END (a))
	      || ((!optimize || reload_completed)
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
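
/* Illustrative pairing (sketch): the predicate and the merger are
   meant to be used together, normally through the cfghooks.c
   wrappers:

       if (can_merge_blocks_p (a, b))
	 merge_blocks (a, b);

   which dispatch to this predicate and to cfg_layout_merge_blocks
   below when cfglayout mode is active.  */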

/* Merge blocks A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
#ifdef ENABLE_CHECKING
  gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
#endif

  if (dump_file)
    fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize && EDGE_SUCC (a, 0)->goto_locus)
    {
      rtx insn = BB_END (a), end = PREV_INSN (BB_HEAD (a));
      int goto_locus = EDGE_SUCC (a, 0)->goto_locus;

      while (insn != end && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
	insn = PREV_INSN (insn);
      if (insn != end && locator_eq (INSN_LOCATOR (insn), goto_locus))
	goto_locus = 0;
      else
	{
	  insn = BB_HEAD (b);
	  end = NEXT_INSN (BB_END (b));
	  while (insn != end && !INSN_P (insn))
	    insn = NEXT_INSN (insn);
	  if (insn != end && INSN_LOCATOR (insn) != 0
	      && locator_eq (INSN_LOCATOR (insn), goto_locus))
	    goto_locus = 0;
	}
      if (goto_locus)
	{
	  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
	  INSN_LOCATOR (BB_END (a)) = goto_locus;
	}
    }

  /* Possible line number notes should appear in between.  */
  if (b->il.rtl->header)
    {
      rtx first = BB_END (a), last;

      last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
      delete_insn_chain (NEXT_INSN (first), last, false);
      b->il.rtl->header = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (first, BB_END (a), a);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
	first = NEXT_INSN (first);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
      BB_HEAD (b) = NULL;

      /* emit_insn_after_noloc doesn't call df_insn_change_bb.
	 We need to do it explicitly, via update_bb_for_insn_chain.  */
      update_bb_for_insn_chain (NEXT_INSN (first),
				BB_END (b),
				a);

      delete_insn (first);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      rtx insn;

      update_bb_for_insn_chain (BB_HEAD (b), BB_END (b), a);

      insn = BB_HEAD (b);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
      BB_HEAD (b) = NULL;
      BB_END (a) = BB_END (b);
      delete_insn (insn);
    }

  df_bb_delete (b->index);

  /* Possible tablejumps and barriers should appear after the block.  */
  if (b->il.rtl->footer)
    {
      if (!a->il.rtl->footer)
	a->il.rtl->footer = b->il.rtl->footer;
      else
	{
	  rtx last = a->il.rtl->footer;

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = b->il.rtl->footer;
	  PREV_INSN (b->il.rtl->footer) = last;
	}
      b->il.rtl->footer = NULL;
    }

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n",
	     a->index, b->index);
}

/* Split edge E.  */

static basic_block
cfg_layout_split_edge (edge e)
{
  basic_block new_bb =
    create_basic_block (e->src != ENTRY_BLOCK_PTR
			? NEXT_INSN (BB_END (e->src)) : get_insns (),
			NULL_RTX, e->src);

  if (e->dest == EXIT_BLOCK_PTR)
    BB_COPY_PARTITION (new_bb, e->src);
  else
    BB_COPY_PARTITION (new_bb, e->dest);
  make_edge (new_bb, e->dest, EDGE_FALLTHRU);
  redirect_edge_and_branch_force (e, new_bb);

  return new_bb;
}
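
/* Illustrative use (sketch): passes split edges through the cfghooks.c
   wrapper, which dispatches here in cfglayout mode:

       basic_block pad = split_edge (e);

   PAD is just a placeholder name for the returned forwarder block.  */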

/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}

/* Return 1 if BB ends with a call, possibly followed by some
   instructions that must stay with the call, 0 otherwise.  */

static bool
rtl_block_ends_with_call_p (basic_block bb)
{
  rtx insn = BB_END (bb);

  while (!CALL_P (insn)
	 && insn != BB_HEAD (bb)
	 && (keep_with_call_p (insn)
	     || NOTE_P (insn)
	     || DEBUG_INSN_P (insn)))
    insn = PREV_INSN (insn);
  return (CALL_P (insn));
}

/* Return 1 if BB ends with a conditional branch, 0 otherwise.  */

static bool
rtl_block_ends_with_condjump_p (const_basic_block bb)
{
  return any_condjump_p (BB_END (bb));
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for rtl_flow_call_edges_add.  */

static bool
need_fake_edge_p (const_rtx insn)
{
  if (!INSN_P (insn))
    return false;

  if ((CALL_P (insn)
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !(RTL_CONST_OR_PURE_CALL_P (insn))))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	   && MEM_VOLATILE_P (PATTERN (insn)))
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && asm_noperands (insn) != -1
	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls, and for volatile inline assembly, in the bitmap of
   blocks specified by BLOCKS or in the whole CFG if BLOCKS is zero.
   Return the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such a case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR);
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
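
/* Illustrative call (sketch): instrumentation code reaches this
   through the flow_call_edges_add hook, typically asking for the
   whole CFG:

       int n_split = flow_call_edges_add (NULL);

   a NULL bitmap means "all blocks"; the return value is the number of
   blocks that had to be split.  */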

/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target, SECOND_HEAD the fall-thru head; there
   is no need to handle the latter here, the loop versioning code handles
   it.  The reason SECOND_HEAD is in the signature at all is that the
   tree-level hook needs it, and both implementations must share the
   same type since this is a hook.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  enum machine_mode mode;

  label = block_label (first_head);
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label, -1);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new condition in the new head.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
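
/* Illustrative note: COMP_RTX arrives from the loop versioning code
   as a comparison rtx built by the caller, for instance

       rtx cond = gen_rtx_EQ (VOIDmode, op0, op1);

   with OP0 and OP1 placeholders for the operands being compared.  */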


/* Given a block B ending in a conditional branch, store the branch
   edge and the fall-thru edge in *BRANCH_EDGE and *FALLTHRU_EDGE
   respectively.  */
static void
rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
			   edge *fallthru_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_FALLTHRU)
    {
      *fallthru_edge = e;
      *branch_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *branch_edge = e;
      *fallthru_edge = EDGE_SUCC (b, 1);
    }
}
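
/* Illustrative use (sketch): through the extract_cond_bb_edges hook
   a pass can recover both successors of a conditional block without
   depending on their order in the successor vector:

       edge branch, fallthru;
       extract_cond_bb_edges (bb, &branch, &fallthru);

   extract_cond_bb_edges is the dispatching wrapper in cfghooks.c.  */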

/* Allocate and zero the RTL-specific info structure for BB.  */
void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.rtl);
  bb->il.rtl = ggc_alloc_cleared_rtl_bb_info ();
}

/* Add PAT to the end of basic block BB.  */

rtx
insert_insn_end_bb_new (rtx pat, basic_block bb)
{
  rtx insn = BB_END (bb);
  rtx new_insn;
  rtx pat_end = pat;

  while (NEXT_INSN (pat_end) != NULL_RTX)
    pat_end = NEXT_INSN (pat_end);

  /* If the last insn is a jump, insert PAT in front [taking care to
     handle cc0, etc. properly].  Similarly we need to take care of
     trapping instructions in the presence of non-call exceptions.  */

  if (JUMP_P (insn)
      || (NONJUMP_INSN_P (insn)
          && (!single_succ_p (bb)
              || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
    {
#ifdef HAVE_cc0
      rtx note;
#endif
      /* If this is a jump table, then we can't insert stuff here.  Since
         we know the previous real insn must be the tablejump, we insert
         the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
        insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
         if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
        insn = XEXP (note, 0);
      else
        {
          rtx maybe_cc0_setter = prev_nonnote_insn (insn);
          if (maybe_cc0_setter
              && INSN_P (maybe_cc0_setter)
              && sets_cc0_p (PATTERN (maybe_cc0_setter)))
            insn = maybe_cc0_setter;
        }
#endif
      /* FIXME: What if something in cc0/jump uses value set in new
         insn?  */
      new_insn = emit_insn_before_noloc (pat, insn, bb);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (CALL_P (insn)
           && (!single_succ_p (bb)
               || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    {
      /* Keeping in mind targets with small register classes and parameters
         in registers, we search backward and place the instructions before
	 the first parameter is loaded.  Do this for everyone for consistency
	 and a presumption that we'll get better code elsewhere as well.  */

      /* Since different machines initialize their parameter registers
         in different orders, assume nothing.  Collect the set of all
         parameter registers.  */
      insn = find_first_parameter_load (insn, BB_HEAD (bb));

      /* If we found all the parameter loads, then we want to insert
         before the first parameter load.

         If we did not find all the parameter loads, then we might have
         stopped on the head of the block, which could be a CODE_LABEL.
         If we inserted before the CODE_LABEL, then we would be putting
         the insn in the wrong basic block.  In that case, put the insn
         after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (LABEL_P (insn)
             || NOTE_INSN_BASIC_BLOCK_P (insn))
        insn = NEXT_INSN (insn);

      new_insn = emit_insn_before_noloc (pat, insn, bb);
    }
  else
    new_insn = emit_insn_after_noloc (pat, insn, bb);

  return new_insn;
}
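
/* Usage sketch (illustrative): PAT is a pattern or insn chain ready
   for emission; a GCSE-style pass inserting a register copy at the
   end of BB might do

       rtx pat = gen_move_insn (dest_reg, src_reg);
       insert_insn_end_bb_new (pat, bb);

   where DEST_REG and SRC_REG are placeholders for the caller's
   operands.  */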

/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
rtl_can_remove_branch_p (const_edge e)
{
  const_basic_block src = e->src;
  const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
  const_rtx insn = BB_END (src), set;

  /* The conditions are taken from try_redirect_by_replacing_jump.  */
  if (target == EXIT_BLOCK_PTR)
    return false;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return false;

  if (!onlyjump_p (insn)
      || tablejump_p (insn, NULL, NULL))
    return false;

  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  return true;
}
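
/* Illustrative pairing (sketch): callers query and act through the
   cfghooks.c wrappers rather than calling this directly:

       if (can_remove_branch_p (e))
	 remove_branch (e);

   remove_branch deletes the branch insn and the edge E itself.  */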

/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  NULL, /* can_duplicate_block_p */
  NULL, /* duplicate_block */
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  NULL, /* extract_cond_bb_edges */
  NULL		/* flush_pending_stmts */
};

/* Implementation of CFG manipulation for cfg layout RTL, where
   basic blocks connected via fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in future
   versions of the compiler.  */

/* We do not want to declare these functions in a header file, since they
   should only be used through the cfghooks interface, and we do not want to
   move them here since it would require also moving quite a lot of related
   code.  They are in cfglayout.c.  */
extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
extern basic_block cfg_layout_duplicate_bb (basic_block);

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL		/* flush_pending_stmts */
};