/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains low-level functions that manipulate and analyze the
   CFG and that are aware of the RTL intermediate language.

   Available functionality:
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Basic block manipulation
	 create_basic_block, flow_delete_block, split_block,
	 merge_blocks_nomove
     - Infrastructure to quickly determine the basic block for an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
	 block_label, redirect_edge_and_branch,
	 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
     - Edge splitting and committing to edges
	 split_edge, insert_insn_on_edge, commit_edge_insertions
     - Dumping and debugging
	 print_rtl_with_bb, dump_bb, debug_bb, debug_bb_n
     - Consistency checking
	 verify_flow_info
     - CFG updating after constant propagation
	 purge_dead_edges, purge_all_dead_edges   */
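/* For example, a pass that needs to add code on a (possibly critical) edge E
   typically queues the instructions and commits them all at once:

	insert_insn_on_edge (pattern, e);
	...
	commit_edge_insertions ();

   commit_edge_insertions then splits edges and creates the new basic blocks
   needed to hold the queued instructions.  */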

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"

/* Stubs in case we don't have a return insn.  */
#ifndef HAVE_return
#define HAVE_return 0
#define gen_return() NULL_RTX
#endif

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */
rtx label_value_list;
rtx tail_recursion_label_list;

static int can_delete_note_p		PARAMS ((rtx));
static int can_delete_label_p		PARAMS ((rtx));
static void commit_one_edge_insertion	PARAMS ((edge, int));
static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
static rtx last_loop_beg_note		PARAMS ((rtx));
static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
static basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !in_expr_list_p (forced_labels, label)
	  && !in_expr_list_p (label_value_list, label));
}

/* Delete INSN by patching it out.  Return the next insn.  */

rtx
delete_insn (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (GET_CODE (insn) == CODE_LABEL)
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be referenced via variables, the constant pool, etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (insn) = name;
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      if (INSN_DELETED_P (insn))
	abort ();
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn)
      && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
	   && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
    LABEL_NUSES (XEXP (note, 0))--;

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (GET_CODE (label) != NOTE)
	    LABEL_NUSES (label)--;
	}
    }

  return next;
}

/* Like delete_insn but also purge dead edges from BB.  */
rtx
delete_insn_and_edges (insn)
     rtx insn;
{
  rtx x;
  bool purge = false;

  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BLOCK_FOR_INSN (insn)->end == insn)
    purge = true;
  x = delete_insn (insn);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
  return x;
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
delete_insn_chain (start, finish)
     rtx start, finish;
{
  rtx next;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */
  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else
	next = delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}

/* Like delete_insn_chain but also purge dead edges from BB.  */
void
delete_insn_chain_and_edges (first, last)
     rtx first, last;
{
  bool purge = false;

  if (INSN_P (last)
      && BLOCK_FOR_INSN (last)
      && BLOCK_FOR_INSN (last)->end == last)
    purge = true;
  delete_insn_chain (first, last);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (last));
}

/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - it
   reuses the note and basic block struct in BB_NOTE, if any, does not grow the
   BASIC_BLOCK chain, and should be used directly only by CFG construction
   code.  END can be NULL to create a new empty basic block before HEAD.  Both
   END and HEAD can be NULL to create a basic block at the end of the INSN
   chain.  AFTER is the basic block the new block should be placed after.  */

basic_block
create_basic_block_structure (head, end, bb_note, after)
     rtx head, end, bb_note;
     basic_block after;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (GET_CODE (head) == CODE_LABEL && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = last_basic_block++;
  bb->flags = BB_NEW;
  link_block (bb, after);
  BASIC_BLOCK (bb->index) = bb;
  update_bb_for_insn (bb);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

/* Create a new basic block consisting of the instructions between HEAD and
   END and place it in the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be NULL
   to create a basic block at the end of the INSN chain.  */

basic_block
create_basic_block (head, end, after)
     rtx head, end;
     basic_block after;
{
  basic_block bb;

  /* Place the new block just after the end.  */
  VARRAY_GROW (basic_block_info, last_basic_block+1);

  n_basic_blocks++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

int
flow_delete_block_noexpunge (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
     hanging before the block.  */

  for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
	break;
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
	  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
    }

  insn = b->head;

  never_reached_warning (insn, b->end);

  if (GET_CODE (insn) == CODE_LABEL)
    maybe_remove_eh_handler (insn);

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  b->head = NULL;
  delete_insn_chain (insn, end);

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  while (b->pred != NULL)
    remove_edge (b->pred);
  while (b->succ != NULL)
    remove_edge (b->succ);

  b->pred = NULL;
  b->succ = NULL;

  return deleted_handler;
}

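/* Like flow_delete_block_noexpunge, but also remove block B from the basic
   block array.  */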
int
flow_delete_block (b)
     basic_block b;
{
  int deleted_handler = flow_delete_block_noexpunge (b);

  /* Remove the basic block from the array.  */
  expunge_block (b);

  return deleted_handler;
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn ()
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end = bb->end;
      rtx insn;

      for (insn = bb->head; ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

void
free_bb_for_insn ()
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) != BARRIER)
      BLOCK_FOR_INSN (insn) = NULL;
}

/* Update insns block within BB.  */

void
update_bb_for_insn (bb)
     basic_block bb;
{
  rtx insn;

  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      set_block_for_insn (insn, bb);
      if (insn == bb->end)
	break;
    }
}

/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
  new_bb->count = bb->count;
  new_bb->frequency = bb->frequency;
  new_bb->loop_depth = bb->loop_depth;
  bb->end = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succ = bb->succ;
  bb->succ = NULL;
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
#ifdef HAVE_conditional_execution
      /* In the presence of conditional execution we are not able to update
	 liveness precisely.  */
      if (reload_completed)
	{
	  bb->flags |= BB_DIRTY;
	  new_bb->flags |= BB_DIRTY;
	}
#endif
    }

  return new_edge;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous, hence `nomove'.  */

void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  rtx b_head = b->head, b_end = b->end, a_end = a->end;
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;
  edge e;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (GET_CODE (prev) != NOTE
	    || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
	    || prev == a->head)
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
    del_first = NEXT_INSN (a_end);

  /* Normally there should only be one successor of A and that is B, but
     partway though the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;
  a->flags |= b->flags;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;
  a->global_live_at_end = b->global_live_at_end;

  expunge_block (b);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  delete_insn_chain (del_first, del_last);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      rtx x;

      for (x = a_end; x != b_end; x = NEXT_INSN (x))
	set_block_for_insn (x, a);

      set_block_for_insn (b_end, a);

      a_end = b_end;
    }

  a->end = a_end;
}

/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx
block_label (block)
     basic_block block;
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;

  if (GET_CODE (block->head) != CODE_LABEL)
    {
      block->head = emit_label_before (gen_label_rtx (), block->head);
    }

  return block->head;
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to redirect_edge_and_branch.  */

static bool
try_redirect_by_replacing_jump (e, target)
     edge e;
     basic_block target;
{
  basic_block src = e->src;
  rtx insn = src->end, kill_from;
  edge tmp;
  rtx set, table;
  int fallthru = 0;

  /* Verify that all targets will be TARGET.  */
  for (tmp = src->succ; tmp; tmp = tmp->succ_next)
    if (tmp->dest != target && tmp != e)
      break;

  if (tmp || !onlyjump_p (insn))
    return false;
  if (reload_completed && JUMP_LABEL (insn)
      && (table = NEXT_INSN (JUMP_LABEL (insn))) != NULL_RTX
      && GET_CODE (table) == JUMP_INSN
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
	  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    return false;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (can_fallthru (src, target))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      delete_insn_chain (kill_from, PREV_INSN (target->head));
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return false;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return false;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, tmp;

      emit_jump_insn_after (gen_jump (target_label), insn);
      JUMP_LABEL (src->end) = target_label;
      LABEL_NUSES (target_label)++;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (src->end));


      delete_insn_chain (kill_from, insn);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  delete_insn_chain (JUMP_LABEL (insn), tmp);
	}

      barrier = next_nonnote_insn (src->end);
      if (!barrier || GET_CODE (barrier) != BARRIER)
	emit_barrier_after (src->end);
    }

  /* Keep only one edge out and set proper flags.  */
  while (src->succ->succ_next)
    remove_edge (src->succ);
  e = src->succ;
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (GET_CODE (e->src->end) == NOTE
	 && NOTE_LINE_NUMBER (e->src->end) >= 0)
    delete_insn (e->src->end);

  if (e->dest != target)
    redirect_edge_succ (e, target);

  return true;
}

/* Return the last LOOP_BEG note appearing after INSN, before the start of the
   next basic block.  Return INSN if there are no such notes.

   When emitting a jump to redirect a fallthru edge, the jump should always
   appear after the LOOP_BEG notes, as the loop optimizer expects the loop to
   either start with a fallthru edge or with a jump, following the LOOP_BEG
   note, to the loop exit test.  */

static rtx
last_loop_beg_note (insn)
     rtx insn;
{
  rtx last = insn;

  for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
      last = insn;

  return last;
}

/* Attempt to change code to redirect edge E to TARGET.  Do not do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination already equal to
   TARGET.  In that case it should try the simplifications and do nothing if
   none is possible.

   Return true if the transformation succeeded.  We still return false in
   case E already pointed to TARGET and we did not manage to simplify the
   instruction stream.  */

bool
redirect_edge_and_branch (e, target)
     edge e;
     basic_block target;
{
  rtx tmp;
  rtx old_label = e->dest->head;
  basic_block src = e->src;
  rtx insn = src->end;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (try_redirect_by_replacing_jump (e, target))
    return true;

  /* Do this fast path late, as we want above code to simplify for cases
     where called on single edge leaving basic block containing nontrivial
     jump insn.  */
  else if (e->dest == target)
    return false;

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return false;
  else if (GET_CODE (insn) != JUMP_INSN)
    return false;

  /* Recognize a tablejump and adjust all matching cases.  */
  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (target == EXIT_BLOCK_PTR)
	return false;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      /* If the insn doesn't go where we think, we're confused.  */
      if (JUMP_LABEL (insn) != old_label)
	abort ();

      /* If the substitution doesn't succeed, die.  This can happen
	 if the back end emitted unrecognizable instructions or if
	 target is exit block on some arches.  */
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    redirect_edge_succ_nodup (e, target);

  return true;
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  */

static basic_block
force_nonfallthru_and_redirect (e, target)
     edge e;
     basic_block target;
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination. 
	 This allows bb-reorder to make such edge non-fallthru.  */
      if (e->dest != target)
	abort ();
      abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
      e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
    }
  else if (!(e->flags & EDGE_FALLTHRU))
    abort ();
  else if (e->src == ENTRY_BLOCK_PTR)
    {
      /* We can't redirect the entry block.  Create an empty block at the
         start of the function which we use to add the new jump.  */
      edge *pe1;
      basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);

      /* Change the existing edge's source to be the new block, and add
	 a new edge from the entry block to the new block.  */
      e->src = bb;
      for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
	if (*pe1 == e)
	  {
	    *pe1 = e->succ_next;
	    break;
	  }
      e->succ_next = 0;
      bb->succ = e;
      make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
    }

  if (e->src->succ->succ_next || abnormal_edge_flags)
    {
      /* Create the new structures.  */

      /* Position the new block correctly relative to loop notes.  */
      note = last_loop_beg_note (e->src->end);
      note = NEXT_INSN (note);

      /* ... and ADDR_VECs.  */
      if (note != NULL
	  && GET_CODE (note) == CODE_LABEL
	  && NEXT_INSN (note)
	  && GET_CODE (NEXT_INSN (note)) == JUMP_INSN
	  && (GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_DIFF_VEC
	      || GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_VEC))
	note = NEXT_INSN (NEXT_INSN (note));

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = e->count;
      jump_block->frequency = EDGE_FREQUENCY (e);
      jump_block->loop_depth = target->loop_depth;

      if (target->global_live_at_start)
	{
	  jump_block->global_live_at_start
	    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  jump_block->global_live_at_end
	    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  COPY_REG_SET (jump_block->global_live_at_start,
			target->global_live_at_start);
	  COPY_REG_SET (jump_block->global_live_at_end,
			target->global_live_at_start);
	}

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = e->probability;
      new_edge->count = e->count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
      if (HAVE_return)
	emit_jump_insn_after (gen_return (), jump_block->end);
      else
	abort ();
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after (gen_jump (label), jump_block->end);
      JUMP_LABEL (jump_block->end) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (jump_block->end);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  return new_bb;
}

/* Edge E is assumed to be a fallthru edge.  Emit the jump instruction needed
   (and possibly create a new basic block) to make edge E non-fallthru.
   Return the newly created BB, or NULL if none.  */

basic_block
force_nonfallthru (e)
     edge e;
{
  return force_nonfallthru_and_redirect (e, e->dest);
}

/* Redirect edge even at the expense of creating new jump insn or
   basic block.  Return new basic block if created, NULL otherwise.
   Abort if conversion is impossible.  */

basic_block
redirect_edge_and_branch_force (e, target)
     edge e;
     basic_block target;
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  return force_nonfallthru_and_redirect (e, target);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

void
tidy_fallthru_edge (e, b, c)
     edge e;
     basic_block b, c;
{
  rtx q;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = b->end;
  if (GET_CODE (q) == JUMP_INSN
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || (b->succ == e && e->succ_next == NULL)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      q = PREV_INSN (q);

      /* We don't want a block to end on a line-number note since that has
	 the potential of changing the code between -g and not -g.  */
      while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
	q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (c->head))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));

  e->flags |= EDGE_FALLTHRU;
}

/* Fix up edges that now fall through, or rather should now fall through
   but previously required a jump around now deleted blocks.  Simplify
   the search by only examining blocks numerically adjacent, since this
   is how find_basic_blocks created them.  */

void
tidy_fallthru_edges ()
{
  basic_block b, c;

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return;

  FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
    {
      edge s;

      c = b->next_bb;

      /* We care about simple conditional or unconditional jumps with
	 a single successor.

	 If we had a conditional branch to the next instruction when
	 find_basic_blocks was called, then there will only be one
	 out edge for the block which ended with the conditional
	 branch (since we do not create duplicate edges).

	 Furthermore, the edge will be marked as a fallthru because we
	 merge the flags for the duplicate edges.  So we do not want to
	 check that the edge is not a FALLTHRU edge.  */

      if ((s = b->succ) != NULL
	  && ! (s->flags & EDGE_COMPLEX)
	  && s->succ_next == NULL
	  && s->dest == c
	  /* If the jump insn has side effects, we can't tidy the edge.  */
	  && (GET_CODE (b->end) != JUMP_INSN
	      || onlyjump_p (b->end)))
	tidy_fallthru_edge (s, b, c);
    }
}

/* Helper function for split_edge.  Return true in case edge BB2 to BB1
   is a back edge of a syntactic loop.  */

static bool
back_edge_of_syntactic_loop_p (bb1, bb2)
	basic_block bb1, bb2;
{
  rtx insn;
  int count = 0;
  basic_block bb;

  if (bb1 == bb2)
    return true;

  /* ??? Could we guarantee that bb indices are monotone, so that we could
     just compare them?  */
  for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
    continue;

  if (!bb)
    return false;

  for (insn = bb1->end; insn != bb2->head && count >= 0;
       insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  count++;
	else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	  count--;
      }

  return count >= 0;
}

/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;

      for (e = edge_in->dest->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
                   |  |
		   +--+

      If we need to insert an insn on the edge from block 0 to block 1,
      we want to ensure the instructions we insert are outside of any
      loop notes that physically sit between block 0 and block 1.  Otherwise
      we confuse the loop optimizer into thinking the loop is a phony.  */

  if (edge_in->dest != EXIT_BLOCK_PTR
      && PREV_INSN (edge_in->dest->head)
      && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
      && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
	  == NOTE_INSN_LOOP_BEG)
      && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
    before = PREV_INSN (edge_in->dest->head);
  else if (edge_in->dest != EXIT_BLOCK_PTR)
    before = edge_in->dest->head;
  else
    before = NULL_RTX;

  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
  bb->count = edge_in->count;
  bb->frequency = EDGE_FREQUENCY (edge_in);

  /* ??? This info is likely going to be out of date very soon.  */
  if (edge_in->dest->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (bb->global_live_at_start,
		    edge_in->dest->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end,
		    edge_in->dest->global_live_at_start);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      if (!redirect_edge_and_branch (edge_in, bb))
	abort ();
    }
  else
    redirect_edge_succ (edge_in, bb);

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}

/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e, watch_calls)
     edge e;
     int watch_calls;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Special case -- avoid inserting code between call and storing
     its return value.  */
  if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
      && e->src != ENTRY_BLOCK_PTR
      && GET_CODE (e->src->end) == CALL_INSN)
    {
      rtx next = next_nonnote_insn (e->src->end);

      after = e->dest->head;
      /* The first insn after the call may be a stack pop, skip it.  */
      while (next
	     && keep_with_call_p (next))
	{
	  after = next;
	  next = next_nonnote_insn (next);
	}
      bb = e->dest;
    }
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
         one predecessor, insert there.  Except for the exit block.  */
      if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
	{
	  bb = e->dest;

	  /* Get the location correct wrt a code label, and "nice" wrt
	     a basic block note, and before everything else.  */
	  tmp = bb->head;
	  if (GET_CODE (tmp) == CODE_LABEL)
	    tmp = NEXT_INSN (tmp);
	  if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (tmp == bb->head)
	    before = tmp;
	  else if (tmp)
	    after = PREV_INSN (tmp);
	  else
	    after = get_last_insn ();
	}

      /* If the source has one successor and the edge is not abnormal,
         insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
	       && e->src->succ->succ_next == NULL
	       && e->src != ENTRY_BLOCK_PTR)
	{
	  bb = e->src;

	  /* It is possible to have a non-simple jump here.  Consider a target
	     where some forms of unconditional jumps clobber a register.  This
	     happens on the fr30 for example.

	     We know this block has a single successor, so we can just emit
	     the queued insns before the jump.  */
	  if (GET_CODE (bb->end) == JUMP_INSN)
	    for (before = bb->end;
		 GET_CODE (PREV_INSN (before)) == NOTE
		 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
		 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
	      ;
	  else
	    {
	      /* We'd better be fallthru, or we've lost track of what's what.  */
	      if ((e->flags & EDGE_FALLTHRU) == 0)
		abort ();

	      after = bb->end;
	    }
	}
      /* Otherwise we must split the edge.  */
      else
	{
	  bb = split_edge (e);
	  after = bb->end;
	}
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before (insns, before);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after (insns, after);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
         This is not currently a problem because this only happens
         for the (single) epilogue, which already has a fallthru edge
         to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();

  /* Mark the basic block for find_sub_basic_blocks.  */
  bb->aux = &bb->aux;
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e, next;

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    {
	       changed = true;
	       commit_one_edge_insertion (e, false);
	    }
	}
    }

  if (!changed)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
        SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	if (bb->aux != &bb->aux)
	  abort ();
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}

/* Update the CFG for all queued instructions, taking special care when
   inserting code on an edge between a call and the insn that stores its
   return value.  */

void
commit_edge_insertions_watch_calls ()
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e, next;

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    {
	      changed = true;
	      commit_one_edge_insertion (e, true);
	    }
	}
    }

  if (!changed)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
        SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	if (bb->aux != &bb->aux)
	  abort ();
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}

/* Print out one basic block with live information at start and end.  */

void
dump_bb (bb, outf)
     basic_block bb;
     FILE *outf;
{
  rtx insn;
  rtx last;
  edge e;

  fprintf (outf, ";; Basic block %d, loop depth %d, count ",
	   bb->index, bb->loop_depth);
  fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
  putc ('\n', outf);

  fputs (";; Predecessors: ", outf);
  for (e = bb->pred; e; e = e->pred_next)
    dump_edge_info (outf, e, 0);
  putc ('\n', outf);

  fputs (";; Registers live at start:", outf);
  dump_regset (bb->global_live_at_start, outf);
  putc ('\n', outf);

  for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
       insn = NEXT_INSN (insn))
    print_rtl_single (outf, insn);

  fputs (";; Registers live at end:", outf);
  dump_regset (bb->global_live_at_end, outf);
  putc ('\n', outf);

  fputs (";; Successors: ", outf);
  for (e = bb->succ; e; e = e->succ_next)
    dump_edge_info (outf, e, 1);
  putc ('\n', outf);
}

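/* Dump basic block BB to stderr.  */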
void
debug_bb (bb)
     basic_block bb;
{
  dump_bb (bb, stderr);
}

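/* Dump the basic block with index N to stderr and return it.  */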
basic_block
debug_bb_n (n)
     int n;
{
  basic_block bb = BASIC_BLOCK (n);
  dump_bb (bb, stderr);
  return bb;
}

/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start
	= (basic_block *) xcalloc (max_uid, sizeof (basic_block));
      basic_block *end
	= (basic_block *) xcalloc (max_uid, sizeof (basic_block));
      enum bb_state *in_bb_p
	= (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));

      basic_block bb;

      FOR_EACH_BB_REVERSE (bb)
	{
	  rtx x;

	  start[INSN_UID (bb->head)] = bb;
	  end[INSN_UID (bb->end)] = bb;
	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == bb->end)
		break;
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);
	      dump_regset (bb->global_live_at_start, outf);
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && GET_CODE (tmp_rtx) != NOTE
	      && GET_CODE (tmp_rtx) != BARRIER)
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; End of basic block %d, registers live:\n",
		       bb->index);
	      dump_regset (bb->global_live_at_end, outf);
	      putc ('\n', outf);
	    }

	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}

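/* Update the REG_BR_PROB note attached to the jump at the end of BB so that
   it agrees with the probability of the branch edge out of BB.  */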
void
update_br_prob_note (bb)
     basic_block bb;
{
  rtx note;
  if (GET_CODE (bb->end) != JUMP_INSN)
    return;
  note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
    return;
  XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
}

/* Verify the CFG consistency.  This function checks some CFG invariants and
   aborts when something is wrong.  Hopefully this function will help to
   convert many optimization passes to keep the CFG consistent.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - edge list correctness
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

void
verify_flow_info ()
{
  const int max_uid = get_max_uid ();
  const rtx rtx_first = get_insns ();
  rtx last_head = get_last_insn ();
  basic_block *bb_info, *last_visited;
  size_t *edge_checksum;
  rtx x;
  int num_bb_notes, err = 0;
  basic_block bb, last_bb_seen;

  bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
  last_visited = (basic_block *) xcalloc (last_basic_block + 2,
					  sizeof (basic_block));
  edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));

  /* Check bb chain & numbers.  */
  last_bb_seen = ENTRY_BLOCK_PTR;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
    {
      if (bb != EXIT_BLOCK_PTR
	  && bb != BASIC_BLOCK (bb->index))
	{
	  error ("bb %d on wrong place", bb->index);
	  err = 1;
	}

      if (bb->prev_bb != last_bb_seen)
	{
	  error ("prev_bb of %d should be %d, not %d",
		 bb->index, last_bb_seen->index, bb->prev_bb->index);
	  err = 1;
	}

      last_bb_seen = bb;
    }

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx head = bb->head;
      rtx end = bb->end;

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	if (x == end)
	  break;

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify that insns appear
	     in only one basic block and record the containing block in
	     the local BB_INFO array.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      last_head = x;
    }

  /* Now check the basic blocks (boundaries etc.) */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e;
      rtx note;

      if (INSN_P (bb->end)
	  && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
	  && bb->succ && bb->succ->succ_next
	  && any_condjump_p (bb->end))
	{
	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}
      if (bb->count < 0)
	{
	  error ("verify_flow_info: Wrong count of block %i %i",
	         bb->index, (int)bb->count);
	  err = 1;
	}
      if (bb->frequency < 0)
	{
	  error ("verify_flow_info: Wrong frequency of block %i %i",
	         bb->index, bb->frequency);
	  err = 1;
	}
      for (e = bb->succ; e; e = e->succ_next)
	{
	  if (last_visited [e->dest->index + 2] == bb)
	    {
	      error ("verify_flow_info: Duplicate edge %i->%i",
		     e->src->index, e->dest->index);
	      err = 1;
	    }
	  if (e->probability < 0 || e->probability > REG_BR_PROB_BASE)
	    {
	      error ("verify_flow_info: Wrong probability of edge %i->%i %i",
		     e->src->index, e->dest->index, e->probability);
	      err = 1;
	    }
	  if (e->count < 0)
	    {
	      error ("verify_flow_info: Wrong count of edge %i->%i %i",
		     e->src->index, e->dest->index, (int)e->count);
	      err = 1;
	    }

	  last_visited [e->dest->index + 2] = bb;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++;

	  if ((e->flags & ~(EDGE_DFS_BACK | EDGE_CAN_FALLTHRU | EDGE_IRREDUCIBLE_LOOP)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_call++;

	  if (e->flags & EDGE_EH)
	    n_eh++;
	  else if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;

	  if ((e->flags & EDGE_FALLTHRU)
	      && e->src != ENTRY_BLOCK_PTR
	      && e->dest != EXIT_BLOCK_PTR)
	    {
	      rtx insn;

	      if (e->src->next_bb != e->dest)
		{
		  error
		    ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		     e->src->index, e->dest->index);
		  err = 1;
		}
	      else
		for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
		     insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == BARRIER
#ifndef CASE_DROPS_THROUGH
		      || INSN_P (insn)
#else
		      || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
#endif
		      )
		    {
		      error ("verify_flow_info: Incorrect fallthru %i->%i",
			     e->src->index, e->dest->index);
		      fatal_insn ("wrong insn in the fallthru edge", insn);
		      err = 1;
		    }
	    }

	  if (e->src != bb)
	    {
	      error ("verify_flow_info: Basic block %d succ edge is corrupted",
		     bb->index);
	      fprintf (stderr, "Predecessor: ");
	      dump_edge_info (stderr, e, 0);
	      fprintf (stderr, "\nSuccessor: ");
	      dump_edge_info (stderr, e, 1);
	      fprintf (stderr, "\n");
	      err = 1;
	    }

	  edge_checksum[e->dest->index + 2] += (size_t) e;
	}

      if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
	  && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
	{
	  error ("Missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (GET_CODE (bb->end) != JUMP_INSN
	      || (n_branch > 1 && (any_uncondjump_p (bb->end)
				   || any_condjump_p (bb->end)))))
	{
	  error ("Too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (bb->end))
	{
	  error ("Fallthru edge after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (bb->end))
	{
	  error ("Wrong number of branch edges after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (bb->end)
	  && JUMP_LABEL (bb->end) != bb->next_bb->head)
	{
	  error ("Wrong number of branch edges after conditional jump %i", bb->index);
	  err = 1;
	}
      if (n_call && GET_CODE (bb->end) != CALL_INSN)
	{
	  error ("Call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal
	  && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
	  && (GET_CODE (bb->end) != JUMP_INSN
	      || any_condjump_p (bb->end)
	      || any_uncondjump_p (bb->end)))
	{
	  error ("Abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      if (!n_fallthru)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
	       insn = NEXT_INSN (insn))
	    if (!insn
		|| (GET_CODE (insn) == NOTE
		    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	}

      for (e = bb->pred; e; e = e->pred_next)
	{
	  if (e->dest != bb)
	    {
	      error ("basic block %d pred edge is corrupted", bb->index);
	      fputs ("Predecessor: ", stderr);
	      dump_edge_info (stderr, e, 0);
	      fputs ("\nSuccessor: ", stderr);
	      dump_edge_info (stderr, e, 1);
	      fputc ('\n', stderr);
	      err = 1;
	    }
	  edge_checksum[e->dest->index + 2] -= (size_t) e;
	}

      for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
	if (BLOCK_FOR_INSN (x) != bb)
	  {
	    debug_rtx (x);
	    if (! BLOCK_FOR_INSN (x))
	      error
		("insn %d inside basic block %d but block_for_insn is NULL",
		 INSN_UID (x), bb->index);
	    else
	      error
		("insn %d inside basic block %d but block_for_insn is %i",
		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

	    err = 1;
	  }

      /* OK, pointers are correct.  Now check the header of the basic
         block.  It ought to contain an optional CODE_LABEL followed
	 by the NOTE_INSN_BASIC_BLOCK note.  */
      x = bb->head;
      if (GET_CODE (x) == CODE_LABEL)
	{
	  if (bb->end == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (bb->end == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == bb->end)
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Complete edge checksumming for ENTRY and EXIT.  */
  {
    edge e;

    for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
      edge_checksum[e->dest->index + 2] += (size_t) e;

    for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
      edge_checksum[e->dest->index + 2] -= (size_t) e;
  }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    if (edge_checksum[bb->index + 2])
      {
	error ("basic block %i edge lists are corrupted", bb->index);
	err = 1;
      }

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not numbered consecutively");

	  last_bb_seen = bb;
	}

      if (!bb_info[INSN_UID (x)])
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (INSN_P (x)
	  && GET_CODE (x) == JUMP_INSN
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
	    fatal_insn ("return not followed by barrier", x);
    }

  if (num_bb_notes != n_basic_blocks)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  if (err)
    internal_error ("verify_flow_info failed");

  /* Clean up.  */
  free (bb_info);
  free (last_visited);
  free (edge_checksum);
}
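
/* A minimal usage sketch (kept out of the build via #if 0): passes that
   restructure the CFG typically re-run this consistency check afterwards,
   usually only when checking is enabled.  The function name below is
   hypothetical.  */
#if 0
static void
example_check_cfg_after_transformation ()
{
  /* ... CFG manipulation performed here ...  */
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
#endif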

/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the corresponding edges
   from the CFG.  Return true if any edges were eliminated.  */

bool
purge_dead_edges (bb)
     basic_block bb;
{
  edge e, next;
  rtx insn = bb->end, note;
  bool purged = false;

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (GET_CODE (insn) == INSN
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (e->flags & EDGE_EH)
	{
	  if (can_throw_internal (bb->end))
	    continue;
	}
      else if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (GET_CODE (bb->end) == CALL_INSN
	      && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
		  || INTVAL (XEXP (note, 0)) >= 0))
	    continue;
	}
      else
	continue;

      remove_edge (e);
      bb->flags |= BB_DIRTY;
      purged = true;
    }

  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx note;
      edge b, f;

      /* We care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned a condjump into a simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;

	  /* Avoid letting abnormal flags leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    continue;
	  else if (e->dest != EXIT_BLOCK_PTR
		   && e->dest->head == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    continue;
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    continue;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction.  */
	    continue;

	  /* We do not need this edge.  */
	  bb->flags |= BB_DIRTY;
	  purged = true;
	  remove_edge (e);
	}

      if (!bb->succ || !purged)
	return purged;

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (!bb->succ->succ_next)
	{
	  bb->succ->probability = REG_BR_PROB_BASE;
	  bb->succ->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
       e = e->succ_next)
    ;

  if (!e)
    return purged;

  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (!(e->flags & EDGE_FALLTHRU))
	{
	  bb->flags |= BB_DIRTY;
	  remove_edge (e);
	  purged = true;
	}
    }

  if (!bb->succ || bb->succ->succ_next)
    abort ();

  bb->succ->probability = REG_BR_PROB_BASE;
  bb->succ->count = bb->count;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
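
/* A minimal usage sketch (kept out of the build via #if 0): after a pass
   simplifies or deletes the jump ending BB, the stale outgoing edges can be
   dropped with purge_dead_edges.  The helper name is hypothetical.  */
#if 0
static void
example_cleanup_block_after_jump_change (bb)
     basic_block bb;
{
  if (purge_dead_edges (bb) && rtl_dump_file)
    fprintf (rtl_dump_file, "removed stale edges from bb %i\n", bb->index);
}
#endif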

/* Search all basic blocks for potentially dead edges and purge them.  Return
   true if some edge has been eliminated.  */

bool
purge_all_dead_edges (update_life_p)
     int update_life_p;
{
  int purged = false;
  sbitmap blocks = 0;
  basic_block bb;

  if (update_life_p)
    {
      blocks = sbitmap_alloc (last_basic_block);
      sbitmap_zero (blocks);
    }

  FOR_EACH_BB (bb)
    {
      bool purged_here = purge_dead_edges (bb);

      purged |= purged_here;
      if (purged_here && update_life_p)
	SET_BIT (blocks, bb->index);
    }

  if (update_life_p && purged)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL,
		      PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
		      | PROP_KILL_DEAD_CODE);

  if (update_life_p)
    sbitmap_free (blocks);
  return purged;
}
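
/* A minimal usage sketch (kept out of the build via #if 0): a whole-function
   cleanup after a pass that may have removed or simplified many jumps.  A
   nonzero argument additionally refreshes life information for the blocks
   whose edge lists changed.  The function name is hypothetical.  */
#if 0
static void
example_purge_whole_function ()
{
  if (purge_all_dead_edges (1) && rtl_dump_file)
    fprintf (rtl_dump_file, "some dead edges were purged\n");
}
#endif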