cfgrtl.c 140 KB
Newer Older
1
/* Control flow graph manipulation code for GNU compiler.
2
   Copyright (C) 1987-2016 Free Software Foundation, Inc.
3 4 5 6 7

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
8
Software Foundation; either version 3, or (at your option) any later
9 10 11 12 13 14 15 16
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
17 18
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
19

20 21
/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.
22 23

   Available functionality:
24
     - Basic CFG/RTL manipulation API documented in cfghooks.h
25
     - CFG-aware instruction chain manipulation
26
	 delete_insn, delete_insn_chain
27 28 29 30
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
31 32
     - CFG fixing after coarse manipulation
	fixup_abnormal_edges
33 34

   Functions not supposed for generic use:
35
     - Infrastructure to determine quickly basic block for insn
36
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37
     - Edge redirection with updating and optimizing of insn chain
38
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
39 40 41

#include "config.h"
#include "system.h"
42
#include "coretypes.h"
43
#include "backend.h"
44
#include "target.h"
45
#include "rtl.h"
46 47
#include "tree.h"
#include "cfghooks.h"
48
#include "df.h"
49 50
#include "insn-config.h"
#include "emit-rtl.h"
51 52 53 54 55
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "bb-reorder.h"
56
#include "rtl-error.h"
57
#include "insn-attr.h"
58
#include "dojump.h"
59
#include "expr.h"
60
#include "cfgloop.h"
61
#include "tree-pass.h"
62
#include "print-rtl.h"
63

Steven Bosscher committed
64 65
/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
66
static GTY(()) rtx_insn *cfg_layout_function_footer;
67
static GTY(()) rtx_insn *cfg_layout_function_header;
Steven Bosscher committed
68

69
static rtx_insn *skip_insns_after_block (basic_block);
Steven Bosscher committed
70 71 72 73 74
static void record_effective_endpoints (void);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
75 76
static int can_delete_note_p (const rtx_note *);
static int can_delete_label_p (const rtx_code_label *);
77
static basic_block rtl_split_edge (edge);
78
static bool rtl_move_block_after (basic_block, basic_block);
79
static int rtl_verify_flow_info (void);
80
static basic_block cfg_layout_split_block (basic_block, void *);
81
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
82 83 84 85
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
86
static edge rtl_redirect_edge_and_branch (edge, basic_block);
87
static basic_block rtl_split_block (basic_block, void *);
88
static void rtl_dump_bb (FILE *, basic_block, int, int);
89
static int rtl_verify_flow_info_1 (void);
90
static void rtl_make_forwarder_block (edge);
91 92

/* Return true if NOTE is not one of the ones that must be kept paired,
93
   so that we may simply delete it.  */
94 95

static int
96
can_delete_note_p (const rtx_note *note)
97
{
98 99 100 101 102 103 104 105 106 107
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
108 109 110 111 112
}

/* True if a given label can be deleted.  */

static int
113
can_delete_label_p (const rtx_code_label *label)
114
{
115 116 117
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
118
	  && !in_insn_list_p (forced_labels, label));
119 120
}

121
/* Delete INSN by patching it out.  */
122

123
void
124
delete_insn (rtx uncast_insn)
125
{
126
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
127 128 129
  rtx note;
  bool really_delete = true;

130
  if (LABEL_P (insn))
131 132
    {
      /* Some labels can't be directly removed from the INSN chain, as they
Mike Stump committed
133 134
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
135
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
136 137
	{
	  const char *name = LABEL_NAME (insn);
138
	  basic_block bb = BLOCK_FOR_INSN (insn);
139
	  rtx_insn *bb_note = NEXT_INSN (insn);
140 141 142

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
143
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
144
	  NOTE_DELETED_LABEL_NAME (insn) = name;
145

146
	  /* If the note following the label starts a basic block, and the
147
	     label is a member of the same basic block, interchange the two.  */
148 149
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
150 151
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
152 153
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
154
	      BB_HEAD (bb) = bb_note;
155
	      if (BB_END (bb) == bb_note)
156
		BB_END (bb) = insn;
157
	    }
158
	}
159

160
      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
161 162 163 164
    }

  if (really_delete)
    {
165
      /* If this insn has already been deleted, something is very wrong.  */
166
      gcc_assert (!insn->deleted ());
167 168
      if (INSN_P (insn))
	df_insn_delete (insn);
169
      remove_insn (insn);
170
      insn->set_deleted ();
171 172 173 174
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
175
  if (JUMP_P (insn))
176
    {
177 178 179 180 181 182 183
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
184
	     && LABEL_P (XEXP (note, 0)))
185 186 187 188 189
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }
190

191 192 193 194 195 196 197 198
  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

199
  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
200
    {
201 202
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
203 204 205
      int i;

      for (i = 0; i < len; i++)
206
	{
207
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);
208 209 210 211

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
212
	  if (!NOTE_P (label))
213 214
	    LABEL_NUSES (label)--;
	}
215 216 217
    }
}

218
/* Like delete_insn but also purge dead edges from BB.  */
219

220
void
221
delete_insn_and_edges (rtx_insn *insn)
222 223 224
{
  bool purge = false;

225
  if (INSN_P (insn)
226
      && BLOCK_FOR_INSN (insn)
227
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
228
    purge = true;
229
  delete_insn (insn);
230 231 232 233
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
}

234
/* Unlink a chain of insns between START and FINISH, leaving notes
235 236
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */
237 238

void
239
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
240
{
241
  rtx_insn *prev, *current;
242

243 244 245
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */
246
  current = safe_as_a <rtx_insn *> (finish);
247 248
  while (1)
    {
249
      prev = PREV_INSN (current);
250
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
251 252
	;
      else
253
	delete_insn (current);
254

255
      if (clear_bb && !current->deleted ())
256
	set_block_for_insn (current, NULL);
257

258
      if (current == start)
259
	break;
260
      current = prev;
261 262 263
    }
}

264 265 266 267 268
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
269 270
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */
271 272

basic_block
273
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
274
			      basic_block after)
275 276 277 278 279 280 281 282 283
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

284
      rtx_insn *after;
285

286
      if (LABEL_P (head))
287 288 289 290 291 292 293 294
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
295
	reorder_insns_nobb (bb_note, bb_note, after);
296 297 298 299 300 301 302
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

303
      init_rtl_bb_info (bb);
304
      if (!head && !end)
305 306
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
307
      else if (LABEL_P (head) && end)
308 309 310 311 312 313 314 315 316 317 318 319
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}
320

321 322 323 324 325 326 327
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

328 329
  BB_HEAD (bb) = head;
  BB_END (bb) = end;
330
  bb->index = last_basic_block_for_fn (cfun)++;
331
  bb->flags = BB_NEW | BB_RTL;
332
  link_block (bb, after);
333
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
334
  df_bb_refs_record (bb->index, false);
335
  update_bb_for_insn (bb);
336
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);
337 338 339 340 341 342 343 344

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

345
/* Create new basic block consisting of instructions in between HEAD and END
346 347 348
   and place it to the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create basic block at the end of INSN chain.  */
349

350 351
static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
352
{
353 354
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
355
  basic_block bb;
356

357
  /* Grow the basic block array if needed.  */
358 359
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
360
    {
361 362 363
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
364
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
365
    }
366

367
  n_basic_blocks_for_fn (cfun)++;
368

369
  bb = create_basic_block_structure (head, end, NULL, after);
370 371 372
  bb->aux = NULL;
  return bb;
}
373 374 375 376 377 378 379 380

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}
381 382 383 384 385 386 387 388 389

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

390
static void
391
rtl_delete_block (basic_block b)
392
{
393
  rtx_insn *insn, *end;
394 395

  /* If the head of this block is a CODE_LABEL, then it might be the
396 397
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
398
  insn = BB_HEAD (b);
399

400
  end = get_last_bb_insn (b);
401 402

  /* Selectively delete the entire chain.  */
403
  BB_HEAD (b) = NULL;
404 405
  delete_insn_chain (insn, end, true);

406 407 408 409

  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
410 411
}

412
/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */
413 414

void
415
compute_bb_for_insn (void)
416
{
417
  basic_block bb;
418

419
  FOR_EACH_BB_FN (bb, cfun)
420
    {
421 422
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;
423

424
      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
425
	{
426
	  BLOCK_FOR_INSN (insn) = bb;
427 428 429 430 431 432 433 434
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

435
unsigned int
436
free_bb_for_insn (void)
437
{
438
  rtx_insn *insn;
439
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
440
    if (!BARRIER_P (insn))
441
      BLOCK_FOR_INSN (insn) = NULL;
442
  return 0;
443 444
}

445 446 447 448 449 450 451 452 453 454 455 456 457
namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
458 459
};

460 461 462
class pass_free_cfg : public rtl_opt_pass
{
public:
463 464
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
465 466 467
  {}

  /* opt_pass methods: */
468
  virtual unsigned int execute (function *);
469 470 471

}; // class pass_free_cfg

472 473 474 475 476
unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
477
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
478 479 480 481 482 483 484 485 486 487 488 489
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

490 491 492 493 494 495 496 497
} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}

498
/* Return RTX to emit after when we want to emit code on the entry of function.  */
499
rtx_insn *
500 501
entry_of_function (void)
{
502
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
503
	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
504 505
}

506 507 508 509 510
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
511
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
512
  edge e = ei_safe_edge (ei);
513
  gcc_assert (e->flags & EDGE_FALLTHRU);
514 515 516 517 518

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

519
/* Update BLOCK_FOR_INSN of insns between BEGIN and END
H.J. Lu committed
520
   (or BARRIER if found) and notify df of the bb change.
521 522
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated. */
523

524
static void
525
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
526
{
527
  rtx_insn *insn;
528

529 530
  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
531 532
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
533
}
534 535 536 537 538 539 540 541 542 543

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}

544

545 546 547 548
/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
549
flow_active_insn_p (const rtx_insn *insn)
550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
571
contains_no_active_insn_p (const_basic_block bb)
572
{
573
  rtx_insn *insn;
574

575
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
576 577 578
      || !single_succ_p (bb))
    return false;

579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

597 598 599 600 601 602 603 604 605 606 607
  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
	return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

608
  return true;
609 610 611
}

/* Return nonzero if we can reach target from src by falling through.  */
612
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */
613 614 615 616

bool
can_fallthru (basic_block src, basic_block target)
{
617 618
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
619 620 621
  edge e;
  edge_iterator ei;

622
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
623 624
    return true;
  if (src->next_bb != target)
625 626 627 628 629 630
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

631
  FOR_EACH_EDGE (e, ei, src->succs)
632
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
633
	&& e->flags & EDGE_FALLTHRU)
634
      return false;
635 636

  insn2 = BB_HEAD (target);
637
  if (!active_insn_p (insn2))
638 639 640 641 642 643 644 645 646 647 648 649 650 651
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}

/* Return nonzero if we could reach target from src by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

652
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
653 654
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
655
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
656 657 658 659 660
	&& e->flags & EDGE_FALLTHRU)
      return 0;
  return true;
}

661
/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
662
rtx_note *
663 664
bb_note (basic_block bb)
{
665
  rtx_insn *note;
666 667 668 669 670 671

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
672
  return as_a <rtx_note *> (note);
673 674
}

675 676 677
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

678
static rtx_insn *
679 680
first_insn_after_basic_block_note (basic_block block)
{
681
  rtx_insn *insn;
682 683 684 685 686

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
687
    return NULL;
688 689 690 691 692 693 694
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

695 696
/* Creates a new basic block just after basic block BB by splitting
   everything after specified instruction INSNP.  */
697

698
static basic_block
699
rtl_split_block (basic_block bb, void *insnp)
700 701
{
  basic_block new_bb;
702
  rtx_insn *insn = (rtx_insn *) insnp;
703
  edge e;
704
  edge_iterator ei;
705

706 707 708 709 710
  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
711
	{
712
	  rtx_insn *next = insn;
713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
731 732 733 734 735 736 737 738 739
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);
740 741

  /* Create the new basic block.  */
742
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
743
  BB_COPY_PARTITION (new_bb, bb);
744
  BB_END (bb) = insn;
745 746

  /* Redirect the outgoing edges.  */
747 748 749
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
750 751
    e->src = new_bb;

752 753
  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
754
  return new_bb;
755 756
}

757 758 759 760 761 762
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
763
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
764
  rtx_insn *insn, *end;
765

766
  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
767 768 769 770 771
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
772
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
773 774
    insn = PREV_INSN (insn);

775
  if (insn != end && INSN_LOCATION (insn) == goto_locus)
776 777 778 779 780 781 782 783 784 785
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

786 787
      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
788 789 790 791 792 793 794 795 796 797 798 799 800 801 802
	return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

803
  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
804
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
805 806
}

807
/* Blocks A and B are to be merged into a single block A.  The insns
808
   are already contiguous.  */
809

810 811
static void
rtl_merge_blocks (basic_block a, basic_block b)
812
{
813 814 815
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
816
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
817 818
  int b_empty = 0;

819
  if (dump_file)
820 821
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);
822

823 824 825
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

826
  /* If there was a CODE_LABEL beginning B, delete it.  */
827
  if (LABEL_P (b_head))
828 829 830 831 832
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
833

834 835 836 837
      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

838 839
  /* Delete the basic block note and handle blocks containing just that
     note.  */
840 841 842 843 844 845
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;
846

847 848 849 850 851
      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
852
  if (JUMP_P (a_end))
853
    {
854
      rtx_insn *prev;
855 856

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
857
	if (!NOTE_P (prev)
858
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
859
	    || prev == BB_HEAD (a))
860 861 862 863 864 865
	  break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
866
      if (HAVE_cc0 && only_sets_cc0_p (prev))
867
	{
868
	  rtx_insn *tmp = prev;
869

870 871
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
872
	    prev = BB_HEAD (a);
873 874 875 876 877
	  del_first = tmp;
	}

      a_end = PREV_INSN (del_first);
    }
878
  else if (BARRIER_P (NEXT_INSN (a_end)))
879 880 881 882
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
883 884
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
885
  delete_insn_chain (del_first, del_last, true);
886

887
  /* When not optimizing and the edge is the only place in RTL which holds
888 889 890 891 892 893 894
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

895 896 897
  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
898
      update_bb_for_insn_chain (a_end, b_debug_end, a);
899

900 901
      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
902 903 904 905 906 907 908 909 910 911 912
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
913
      BB_END (a) = b_debug_end;
914
    }
915

916
  df_bb_delete (b->index);
917 918

  /* If B was a forwarder block, propagate the locus on the edge.  */
919 920
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
921 922 923 924
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
925
}
926

927

928
/* Return true when block A and B can be merged.  */
929

930 931
static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
932
{
933 934
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
935 936
     and cold sections.

937
     Basic block partitioning may result in some jumps that appear to
Mike Stump committed
938 939 940
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
941
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
942

943
  if (BB_PARTITION (a) != BB_PARTITION (b))
944
    return false;
945

946 947 948 949
  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

950
  /* There must be exactly one edge in between the blocks.  */
951 952 953
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
954
	  && a != b
955
	  /* Must be simple edge.  */
956
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
957
	  && a->next_bb == b
958 959
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
960 961
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
962
	  && (!JUMP_P (BB_END (a))
963
	      || (reload_completed
964
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
965
}
966

967 968
/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */
969

970
rtx_code_label *
971
block_label (basic_block block)
972
{
973
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
974
    return NULL;
975

976
  if (!LABEL_P (BB_HEAD (block)))
977
    {
978
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
979
    }
980

981
  return as_a <rtx_code_label *> (BB_HEAD (block));
982 983 984
}

/* Attempt to perform edge redirection by replacing possibly complex jump
985 986 987
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */
988

989
edge
990
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
991 992
{
  basic_block src = e->src;
993
  rtx_insn *insn = BB_END (src), *kill_from;
994
  rtx set;
995
  int fallthru = 0;
996 997 998

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
999 1000 1001
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
Mike Stump committed
1002 1003 1004
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
1005
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
Mike Stump committed
1006

1007
  if (BB_PARTITION (src) != BB_PARTITION (target))
1008
    return NULL;
1009

1010 1011 1012 1013 1014 1015 1016 1017 1018
  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;
1019

1020
  if (!onlyjump_p (insn))
1021
    return NULL;
1022
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
1023
    return NULL;
1024 1025 1026 1027

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
1028
    return NULL;
1029 1030 1031 1032

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
1033
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
1034
      && only_sets_cc0_p (PREV_INSN (insn)))
1035 1036 1037
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  */
1038
  if (in_cfglayout || can_fallthru (src, target))
1039
    {
1040 1041
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1042 1043
      fallthru = 1;

1044
      /* Selectively unlink whole insn chain.  */
1045 1046
      if (in_cfglayout)
	{
1047
	  rtx_insn *insn = BB_FOOTER (src);
1048

1049
	  delete_insn_chain (kill_from, BB_END (src), false);
1050 1051 1052 1053

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
1054
	      if (BARRIER_P (insn))
1055 1056
		{
		  if (PREV_INSN (insn))
1057
		    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1058
		  else
1059
		    BB_FOOTER (src) = NEXT_INSN (insn);
1060
		  if (NEXT_INSN (insn))
1061
		    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1062
		}
1063
	      if (LABEL_P (insn))
1064 1065 1066 1067 1068
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
1069 1070
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
1071
    }
1072

1073 1074 1075 1076
  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
1077
	return NULL;
1078 1079
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1080
		 INSN_UID (insn), e->dest->index, target->index);
1081 1082
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
1083
	{
1084
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
1085
	  return NULL;
1086
	}
1087
    }
1088

1089
  /* Cannot do anything for target exit block.  */
1090
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1091
    return NULL;
1092

1093 1094 1095
  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
1096
      rtx_code_label *target_label = block_label (target);
1097 1098
      rtx_insn *barrier;
      rtx label;
1099
      rtx_jump_table_data *table;
1100

1101
      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
1102
      JUMP_LABEL (BB_END (src)) = target_label;
1103
      LABEL_NUSES (target_label)++;
1104 1105
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
1106
		 INSN_UID (insn), INSN_UID (BB_END (src)));
1107

1108

1109 1110
      delete_insn_chain (kill_from, insn, false);

1111 1112 1113
      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
1114 1115
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);
1116

1117
      barrier = next_nonnote_insn (BB_END (src));
1118
      if (!barrier || !BARRIER_P (barrier))
1119
	emit_barrier_after (BB_END (src));
1120 1121
      else
	{
1122
	  if (barrier != NEXT_INSN (BB_END (src)))
1123 1124 1125 1126
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
1127
	      rtx_insn *new_insn = BB_END (src);
1128

1129 1130
	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);
1131

1132 1133
	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1134

1135 1136
	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1137

1138 1139
	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
1140 1141
	    }
	}
1142 1143 1144
    }

  /* Keep only one edge out and set proper flags.  */
1145
  if (!single_succ_p (src))
1146
    remove_edge (e);
1147
  gcc_assert (single_succ_p (src));
1148

1149
  e = single_succ_edge (src);
1150 1151 1152 1153
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;
1154

1155 1156 1157 1158 1159
  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
1160
  return e;
1161 1162
}

Michael Matz committed
1163 1164 1165 1166 1167 1168 1169
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  */

static bool
1170
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
1171
{
1172
  rtx_jump_table_data *table;
1173 1174
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
1175
  if (tablejump_p (insn, NULL, &table))
1176 1177 1178
    {
      rtvec vec;
      int j;
1179
      rtx_code_label *new_label = block_label (new_bb);
1180

1181
      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
Michael Matz committed
1182
	return false;
1183
      vec = table->get_labels ();
1184 1185 1186 1187 1188 1189 1190 1191 1192

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

1193
      /* Handle casesi dispatch insns.  */
1194 1195 1196 1197
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1198
	  && LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)) == old_label)
1199
	{
1200
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1201 1202 1203 1204 1205
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
1206 1207 1208
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1209
      rtx note;
1210

1211
      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1212
	return false;
1213
      rtx_code_label *new_label = block_label (new_bb);
1214 1215 1216 1217 1218 1219 1220 1221 1222 1223 1224 1225 1226 1227 1228 1229 1230 1231 1232 1233 1234 1235 1236 1237 1238 1239 1240 1241 1242 1243

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
1244 1245 1246
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
1247
    }
1248 1249 1250 1251 1252
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
1253 1254 1255
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
Michael Matz committed
1256
	return false;
1257

Michael Matz committed
1258
      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1259
	{
Michael Matz committed
1260 1261 1262 1263 1264 1265
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
1266 1267
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
Michael Matz committed
1268
	    {
1269
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
Michael Matz committed
1270 1271
	      return false;
	    }
1272
	}
1273
    }
Michael Matz committed
1274 1275 1276 1277 1278 1279 1280 1281 1282
  return true;
}


/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
1283
  rtx_insn *old_label = BB_HEAD (e->dest);
Michael Matz committed
1284
  basic_block src = e->src;
1285
  rtx_insn *insn = BB_END (src);
Michael Matz committed
1286 1287 1288 1289 1290 1291 1292 1293 1294

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
1295
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
Michael Matz committed
1296 1297 1298 1299 1300 1301
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
1302
    FOR_BB_INSNS (src, insn)
1303 1304
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
					     old_label, target))
Michael Matz committed
1305
	return NULL;
1306

1307 1308
  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1309
	     e->src->index, e->dest->index, target->index);
1310

1311
  if (e->dest != target)
1312
    e = redirect_edge_succ_nodup (e, target);
1313

1314
  return e;
1315 1316
}

1317 1318 1319 1320 1321 1322 1323
/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
1324 1325
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
1326 1327 1328 1329 1330 1331 1332 1333 1334
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src))
1335 1336
	  && !CROSSING_JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
1337 1338 1339 1340 1341 1342 1343
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
         src if it exists, and if no other successors are
         still crossing.  */
1344
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
1345 1346 1347 1348 1349 1350 1351 1352 1353 1354 1355
        {
          bool has_crossing_succ = false;
          edge e2;
          edge_iterator ei;
          FOR_EACH_EDGE (e2, ei, e->src->succs)
            {
              has_crossing_succ |= (e2->flags & EDGE_CROSSING);
              if (has_crossing_succ)
                break;
            }
          if (!has_crossing_succ)
1356
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
1357 1358 1359 1360
        }
    }
}

1361 1362 1363 1364 1365 1366 1367 1368 1369 1370 1371 1372 1373 1374 1375 1376 1377 1378 1379 1380 1381 1382 1383 1384 1385 1386 1387 1388 1389 1390
/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold. Therefore,
     its preds will no longer be region crossing. Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region. Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
         Note that force_nonfallthru will do any necessary partition
         boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
          && BB_PARTITION (bb) != BB_PARTITION (e->dest)
1391
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1392 1393 1394 1395 1396 1397
        force_nonfallthru (e);
      else
        fixup_partition_crossing (e);
    }
}

1398 1399 1400 1401 1402 1403
/* Attempt to change code to redirect edge E to TARGET.  Don't do that on
   expense of adding new instructions or reordering basic blocks.

   Function can be also called with edge destination equivalent to the TARGET.
   Then it should try the simplifications and do nothing if none is possible.

1404 1405 1406 1407
   Return edge representing the branch if transformation succeeded.  Return NULL
   on failure.
   We still return NULL in case E already destinated TARGET and we didn't
   managed to simplify instruction stream.  */
1408

1409
static edge
1410
rtl_redirect_edge_and_branch (edge e, basic_block target)
1411
{
1412
  edge ret;
1413
  basic_block src = e->src;
1414
  basic_block dest = e->dest;
1415

1416
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1417
    return NULL;
1418

1419
  if (dest == target)
1420
    return e;
1421

1422
  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1423
    {
1424
      df_set_bb_dirty (src);
1425
      fixup_partition_crossing (ret);
1426
      return ret;
1427
    }
1428

1429 1430 1431
  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;
1432

1433
  df_set_bb_dirty (src);
1434
  fixup_partition_crossing (ret);
1435
  return ret;
1436 1437
}

1438 1439 1440 1441 1442
/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
1443
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
1444
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
1445 1446
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
1447 1448 1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464
    {
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
          rtx_insn *footer_tail = BB_FOOTER (bb);

          while (NEXT_INSN (footer_tail))
            footer_tail = NEXT_INSN (footer_tail);
          if (!BARRIER_P (footer_tail))
            {
              SET_NEXT_INSN (footer_tail) = insn;
              SET_PREV_INSN (insn) = footer_tail;
            }
	}
      else
        BB_FOOTER (bb) = insn;
    }
1465 1466
}

1467
/* Like force_nonfallthru below, but additionally performs redirection
1468 1469 1470 1471
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */
1472

1473
basic_block
1474
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1475
{
1476
  basic_block jump_block, new_bb = NULL, src = e->src;
1477 1478
  rtx note;
  edge new_edge;
1479
  int abnormal_edge_flags = 0;
1480
  bool asm_goto_edge = false;
1481
  int loc;
1482

1483 1484
  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
Kazu Hirata committed
1485
     by creating a basic block afterwards to redirect fallthru edge.  */
1486 1487
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1488 1489
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1490 1491
    {
      rtx note;
1492
      edge b = unchecked_make_edge (e->src, target, 0);
1493
      bool redirected;
1494

1495 1496
      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
				  block_label (target), 0);
1497
      gcc_assert (redirected);
Mike Stump committed
1498

1499
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1500 1501
      if (note)
	{
1502
	  int prob = XINT (note, 0);
1503 1504

	  b->probability = prob;
1505
          /* Update this to use GCOV_COMPUTE_SCALE.  */
1506 1507 1508 1509 1510 1511 1512 1513 1514 1515
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

1516
  if (e->flags & EDGE_ABNORMAL)
1517 1518 1519 1520
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
1521
	 one and create separate abnormal edge to original destination.
1522
	 This allows bb-reorder to make such edge non-fallthru.  */
1523
      gcc_assert (e->dest == target);
1524 1525
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
1526
    }
1527
  else
1528
    {
1529
      gcc_assert (e->flags & EDGE_FALLTHRU);
1530
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1531 1532
	{
	  /* We can't redirect the entry block.  Create an empty block
1533 1534 1535 1536 1537
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;
Mike Stump committed
1538

1539 1540
	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
Mike Stump committed
1541

1542 1543 1544
	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
1545 1546
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
1547 1548 1549
	    {
	      if (tmp == e)
		{
1550
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
1551 1552 1553 1554 1555 1556
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }
Mike Stump committed
1557

1558
	  gcc_assert (found);
Mike Stump committed
1559

1560
	  vec_safe_push (bb->succs, e);
1561 1562
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
1563
	}
1564 1565
    }

1566
  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1567
     don't point to the target or fallthru label.  */
1568
  if (JUMP_P (BB_END (e->src))
1569
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
1570 1571
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1572
    {
1573
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1574
      bool adjust_jump_target = false;
1575 1576

      for (i = 0; i < n; ++i)
1577 1578
	{
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1579 1580 1581 1582 1583 1584
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
1585
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1586
	    asm_goto_edge = true;
1587
	}
1588 1589
      if (adjust_jump_target)
	{
1590 1591 1592 1593
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);
1594 1595 1596 1597 1598 1599 1600 1601 1602 1603 1604 1605 1606 1607 1608 1609 1610 1611 1612 1613 1614

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
1615 1616 1617 1618
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
1619
      rtx_insn *new_head;
1620 1621
      gcov_type count = e->count;
      int probability = e->probability;
1622
      /* Create the new structures.  */
1623

1624 1625 1626
      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
1627 1628
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
1629
	new_head = table;
1630
      else
1631 1632
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);
1633

1634
      jump_block = create_basic_block (new_head, NULL, e->src);
1635
      jump_block->count = count;
1636 1637
      jump_block->frequency = EDGE_FREQUENCY (e);

1638 1639
      /* Make sure new block ends up in correct hot/cold section.  */

1640
      BB_COPY_PARTITION (jump_block, e->src);
Mike Stump committed
1641

1642 1643
      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1644 1645
      new_edge->probability = probability;
      new_edge->count = count;
1646 1647 1648 1649 1650

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

1651 1652 1653 1654
      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

1655 1656 1657 1658 1659 1660 1661 1662 1663 1664 1665 1666 1667 1668
      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

1669 1670 1671 1672
      new_bb = jump_block;
    }
  else
    jump_block = e->src;
1673

1674
  loc = e->goto_locus;
1675
  e->flags &= ~EDGE_FALLTHRU;
1676
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1677
    {
1678
      if (jump_label == ret_rtx)
1679 1680
	emit_jump_insn_after_setloc (targetm.gen_return (),
				     BB_END (jump_block), loc);
1681 1682 1683
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
1684
	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
1685 1686
				       BB_END (jump_block), loc);
	}
1687
      set_return_jump_label (BB_END (jump_block));
1688 1689 1690
    }
  else
    {
1691
      rtx_code_label *label = block_label (target);
1692 1693
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
				   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}

/* Edge E is assumed to be a fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}

/* Redirect edge even at the expense of creating new jump insn or
   basic block.  Return new basic block if created, NULL otherwise.
   Conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
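  /* If the edge can be redirected in place, or already reaches TARGET,
     no new basic block is needed.  */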
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  rtx_insn *lab = safe_as_a <rtx_insn *> (label);
	  reorder_insns (lab, lab, PREV_INSN (q));
	  delete_insn (table);
	}

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);

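      /* Back up once more so the jump itself (and the cc0 setter, if
	 any) falls inside the range deleted below.  */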
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}

/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}

/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
        return bb;
    }
  /* Return bb before the exit block.  */
  return bb->prev_bb;
}

/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might
     not be adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
          BB_COPY_PARTITION (bb, edge_in->dest);
        }
      else
        {
          basic_block after = edge_in->dest->prev_bb;
          /* If this is post-bb reordering, and the edge crosses a partition
             boundary, the new block needs to be inserted in the bb chain
             at the end of the src partition (since we put the new bb into
             that partition, see below). Otherwise we may end up creating
             an extra partition crossing in the chain, which is illegal.
             It can't go after the src, because src may have a fall-through
             to a different block.  */
          if (crtl->bb_reorder_complete
              && (edge_in->flags & EDGE_CROSSING))
            {
              after = last_bb_in_partition (edge_in->src);
              before = get_last_bb_insn (after);
              /* The instruction following the last bb in partition should
                 be a barrier, since it cannot end in a fall-through.  */
              gcc_checking_assert (BARRIER_P (before));
              before = NEXT_INSN (before);
            }
          bb = create_basic_block (before, NULL, after);
          /* Put the split bb into the src partition, to avoid creating
             a situation where a cold bb dominates a hot bb, in the case
             where src is cold and dest is hot. The src will dominate
             the new bb (whereas it might not have dominated dest).  */
          BB_COPY_PARTITION (bb, edge_in->src);
        }
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (rtx pattern, edge e)
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));

  if (e->insns.r == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns.r);

  emit_insn (pattern);

  e->insns.r = get_insns ();
  end_sequence ();
}

/* Update the CFG for the instructions queued on edge E.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
         a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
        after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;

  /* Optimization passes that invoke this routine can cause hot blocks
     previously reached by both hot and cold blocks to become dominated only
     by cold blocks. This will cause the verification below to fail,
     and lead to now cold code in the hot section. In some cases this
     may only be visible after newly unreachable blocks are deleted,
     which will be done by fixup_partitions.  */
  fixup_partitions ();

  checking_verify_flow_info ();

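  /* Start from the entry block so that insns queued on its outgoing
     edges are committed as well.  */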
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  commit_one_edge_insertion (e);
    }
}


/* Print out RTL-specific basic block information (live information
   at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
   documented in dumpfile.h.  */

static void
rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
{
  rtx_insn *insn;
  rtx_insn *last;
  char *s_indent;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_top (bb, outf);
      putc ('\n', outf);
    }

  if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
    for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
	 insn = NEXT_INSN (insn))
      {
	if (flags & TDF_DETAILS)
	  df_dump_insn_top (insn, outf);
	if (! (flags & TDF_SLIM))
	  print_rtl_single (outf, insn);
	else
	  dump_insn_slim (outf, insn);
	if (flags & TDF_DETAILS)
	  df_dump_insn_bottom (insn, outf);
      }

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_bottom (bb, outf);
      putc ('\n', outf);
    }

}

/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, int flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
        flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (flags & TDF_BLOCKS)
	{
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}

/* Update the branch probability of BB if a REG_BR_PROB is present.  */

void
update_br_prob_note (basic_block bb)
{
  rtx note;
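  /* Only a jump at the end of BB can carry a REG_BR_PROB note.  */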
  if (!JUMP_P (BB_END (bb)))
    return;
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
  if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
    return;
  XINT (note, 0) = BRANCH_EDGE (bb)->probability;
}

/* Get the last insn associated with block BB (that includes barriers and
   tablejumps after BB).  */
rtx_insn *
get_last_bb_insn (basic_block bb)
{
  rtx_jump_table_data *table;
  rtx_insn *tmp;
  rtx_insn *end = BB_END (bb);

  /* Include any jump table following the basic block.  */
  if (tablejump_p (end, NULL, &table))
    end = table;

  /* Include any barriers that may follow the basic block.  */
  tmp = next_nonnote_insn_bb (end);
  while (tmp && BARRIER_P (tmp))
    {
      end = tmp;
      tmp = next_nonnote_insn_bb (end);
    }

  return end;
}

/* Sanity check partition hotness to ensure that basic blocks in
   the cold partition don't dominate basic blocks in the hot partition.
   If FLAG_ONLY is true, report violations as errors. Otherwise
   re-mark the dominated blocks as cold, since this is run after
   cfg optimizations that may make hot blocks previously reached
   by both hot and cold blocks now only reachable along cold paths.  */

static vec<basic_block>
find_partition_fixes (bool flag_only)
{
  basic_block bb;
  vec<basic_block> bbs_in_cold_partition = vNULL;
  vec<basic_block> bbs_to_fix = vNULL;

  /* Callers check this.  */
  gcc_checking_assert (crtl->has_bb_partition);

  FOR_EACH_BB_FN (bb, cfun)
    if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
      bbs_in_cold_partition.safe_push (bb);

  if (bbs_in_cold_partition.is_empty ())
    return vNULL;

  bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);

  if (dom_calculated_here)
    calculate_dominance_info (CDI_DOMINATORS);

  while (! bbs_in_cold_partition.is_empty  ())
    {
      bb = bbs_in_cold_partition.pop ();
      /* Any blocks dominated by a block in the cold section
         must also be cold.  */
      basic_block son;
      for (son = first_dom_son (CDI_DOMINATORS, bb);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        {
          /* If son is not yet cold, then mark it cold here and
             enqueue it for further processing.  */
          if ((BB_PARTITION (son) != BB_COLD_PARTITION))
            {
              if (flag_only)
                error ("non-cold basic block %d dominated "
                       "by a block in the cold partition (%d)", son->index, bb->index);
              else
                BB_SET_PARTITION (son, BB_COLD_PARTITION);
              bbs_to_fix.safe_push (son);
              bbs_in_cold_partition.safe_push (son);
            }
        }
    }

  if (dom_calculated_here)
    free_dominance_info (CDI_DOMINATORS);

  return bbs_to_fix;
}

/* Perform cleanup on the hot/cold bb partitioning after optimization
   passes that modify the cfg.  */

void
fixup_partitions (void)
{
  basic_block bb;

  if (!crtl->has_bb_partition)
    return;

  /* Delete any blocks that became unreachable and weren't
     already cleaned up, for example during edge forwarding
     and convert_jumps_to_returns. This will expose more
     opportunities for fixing the partition boundaries here.
     Also, the calculation of the dominance graph during verification
     will assert if there are unreachable nodes.  */
  delete_unreachable_blocks ();

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.
     Fixup any that now violate this requirement, as a result of edge
     forwarding and unreachable block deletion.  */
  vec<basic_block> bbs_to_fix = find_partition_fixes (false);

  /* Do the partition fixup after all necessary blocks have been converted to
     cold, so that we only update the region crossings the minimum number of
     places, which can require forcing edges to be non fallthru.  */
  while (! bbs_to_fix.is_empty ())
    {
      bb = bbs_to_fix.pop ();
      fixup_new_cold_bb (bb);
    }
}

/* Verify, in the basic block chain, that there is at most one switch
   between hot/cold partitions. This condition will not be true until
   after reorder_basic_blocks is called.  */

static int
verify_hot_cold_block_grouping (void)
{
  basic_block bb;
  int err = 0;
  bool switched_sections = false;
  int current_partition = BB_UNPARTITIONED;

  /* Even after bb reordering is complete, we go into cfglayout mode
     again (in compgoto). Ensure we don't call this before going back
     into linearized RTL when any layout fixes would have been committed.  */
  if (!crtl->bb_reorder_complete
      || current_ir_type () != IR_RTL_CFGRTL)
    return err;

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (current_partition != BB_UNPARTITIONED
          && BB_PARTITION (bb) != current_partition)
	{
	  if (switched_sections)
	    {
	      error ("multiple hot/cold transitions found (bb %i)",
		     bb->index);
	      err = 1;
	    }
	  else
            switched_sections = true;

          if (!crtl->has_bb_partition)
            error ("partition found but function partition flag not set");
	}
      current_partition = BB_PARTITION (bb);
    }

  return err;
}


/* Perform several checks on the edges out of each block, such as
   the consistency of the branch probabilities, the correctness
   of hot/cold partition crossing edges, and the number of expected
   successor edges.  Also verify that the dominance relationship
   between hot/cold blocks is sane.  */

static int
rtl_verify_edges (void)
{
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
      int n_eh = 0, n_abnormal = 0;
      edge e, fallthru = NULL;
      edge_iterator ei;
      rtx note;
      bool has_crossing_edge = false;

      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
	      && profile_status_for_fn (cfun) != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     XINT (note, 0), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  bool is_crossing;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++, fallthru = e;

	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
          has_crossing_edge |= is_crossing;
	  if (e->flags & EDGE_CROSSING)
	    {
	      if (!is_crossing)
		{
		  error ("EDGE_CROSSING incorrectly set across same section");
		  err = 1;
		}
	      if (e->flags & EDGE_FALLTHRU)
		{
		  error ("fallthru edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (e->flags & EDGE_EH)
		{
		  error ("EH edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
              if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
		{
		  error ("No region crossing jump at section boundary in bb %i",
			 bb->index);
		  err = 1;
		}
	    }
	  else if (is_crossing)
	    {
	      error ("EDGE_CROSSING missing across section boundary");
	      err = 1;
	    }

	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING
			    | EDGE_PRESERVE)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_abnormal_call++;

	  if (e->flags & EDGE_SIBCALL)
	    n_sibcall++;

	  if (e->flags & EDGE_EH)
	    n_eh++;

	  if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

        if (!has_crossing_edge
	    && JUMP_P (BB_END (bb))
	    && CROSSING_JUMP_P (BB_END (bb)))
          {
            print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
            error ("Region crossing jump across same section in bb %i",
                   bb->index);
            err = 1;
          }

      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many exception handling edges in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal_call && !CALL_P (BB_END (bb)))
	{
	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_sibcall && !CALL_P (BB_END (bb)))
	{
	  error ("sibcall edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal > n_eh
	  && !(CALL_P (BB_END (bb))
	       && n_abnormal == n_abnormal_call + n_sibcall)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}
    }

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.  */
  if (crtl->has_bb_partition && !err)
    {
      vec<basic_block> bbs_to_fix = find_partition_fixes (true);
      err = !bbs_to_fix.is_empty ();
    }

  /* Clean up.  */
  return err;
}

/* Checks on the instructions within blocks. Currently checks that each
   block starts with a basic block note, and that basic block notes and
   control flow jumps are not found in the middle of the block.  */

static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}

/* Verify that block pointers for instructions in basic blocks, headers and
   footers are set appropriately.  */

static int
rtl_verify_bb_pointers (void)
{
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Clean up.  */
  return err;
}

/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions
   - verify that hot blocks are not dominated by cold blocks

   In future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  int err = 0;

  err |= rtl_verify_bb_pointers ();

  err |= rtl_verify_bb_insns ();

  err |= rtl_verify_edges ();

  return err;
}

/* Walk the instruction chain and verify that bb head/end pointers
  are correct, and that instructions are in exactly one bb and have
  correct block pointers.  */

static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  rtx_insn *last_head = get_last_insn ();
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *head = BB_HEAD (bb);
      rtx_insn *end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

            /* And that the code outside of basic blocks has NULL bb field.  */
          if (!BARRIER_P (x)
              && BLOCK_FOR_INSN (x) != NULL)
            {
              error ("insn %d outside of basic blocks has non-NULL bb field",
                     INSN_UID (x));
              err = 1;
            }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      last_head = PREV_INSN (x);
    }

  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}

/* Verify that fallthru edges point to adjacent blocks in layout order and
   that barriers exist after non-fallthru blocks.  */

static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx_insn *insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *insn;

	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

   return err;
}

/* Verify that blocks are laid out in consecutive order. While walking the
   instructions, verify that all expected instructions are inside the basic
   blocks, and that all returns are followed by barriers.  */

static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  int num_bb_notes;
  rtx_insn * const rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);

      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

   return err;
}

/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL, plus consistency checks specific to linearized RTL mode.

   Currently it does following checks:
   - all checks of rtl_verify_flow_info_1
   - test head/end pointers
   - check that blocks are laid out in consecutive order
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edges point to the adjacent blocks
   - verify that there is a single hot/cold partition boundary after bbro  */

static int
rtl_verify_flow_info (void)
{
  int err = 0;

  err |= rtl_verify_flow_info_1 ();

  err |= rtl_verify_bb_insn_chain ();

  err |= rtl_verify_fallthru ();

  err |= rtl_verify_bb_layout ();

  err |= verify_hot_cold_block_grouping ();

  return err;
}

/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx_insn *insn = BB_END (bb);
  rtx note;
  bool purged = false;
  bool found;
  edge_iterator ei;

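  /* If the block ends in debug insns, base the checks below on the last
     real (non-debug, non-note) insn of the block.  */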
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = XINT (note, 0);
	  f->probability = REG_BR_PROB_BASE - b->probability;
          /* Update these to use GCOV_COMPUTE_SCALE.  */
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}

/* Search all basic blocks for potentially dead edges and purge them.  Return
   true if some edge has been eliminated.  */

bool
purge_all_dead_edges (void)
{
  int purged = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      bool purged_here = purge_dead_edges (bb);

      purged |= purged_here;
    }

  return purged;
}

/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
         exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx_insn *insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx_insn *stop, *next;

	      e = find_fallthru_edge (bb->succs);

	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  insn->set_undeleted ();
			  SET_PREV_INSN (insn) = NULL_RTX;
			  SET_NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}

/* Cut the insns from FIRST to LAST out of the insns stream.  */

rtx_insn *
unlink_insn_chain (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *prevfirst = PREV_INSN (first);
  rtx_insn *nextlast = NEXT_INSN (last);

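  /* Detach FIRST..LAST and splice their former neighbours together,
     updating the global first/last insn pointers when the cut reaches
     either end of the chain.  */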
  SET_PREV_INSN (first) = NULL;
  SET_NEXT_INSN (last) = NULL;
  if (prevfirst)
    SET_NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    SET_PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers). If there are any such insns,
   we return the last one. Otherwise, we return the end of BB.  */

static rtx_insn *
skip_insns_after_block (basic_block bb)
{
  rtx_insn *insn, *last_insn, *next_head, *prev;

  next_head = NULL;
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    continue;
	  default:
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx_insn *
label_for_bb (basic_block bb)
{
  rtx_insn *label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
	fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx_insn *next_insn;
  basic_block bb;
  rtx_insn *insn;

  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL;

  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
						PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

namespace {

const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      cfg_layout_initialize (0);
      return 0;
    }

}; // class pass_into_cfg_layout_mode

} // anon namespace

rtl_opt_pass *
make_pass_into_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_into_cfg_layout_mode (ctxt);
}

namespace {

const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_outof_cfg_layout_mode

unsigned int
pass_outof_cfg_layout_mode::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_outof_cfg_layout_mode (ctxt);
}


/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This function re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables; they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
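
/* Example (illustrative): a pass working in cfglayout mode records its
   desired block order through the bb->aux pointers and then asks for the
   chain to be relinked, e.g.

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
         bb->aux = bb->next_bb;
     relink_block_chain (true);

   keeps the current order and stays in cfglayout mode; the same aux-chain
   setup is what pass_outof_cfg_layout_mode::execute above performs before
   calling cfg_layout_finalize.  */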


/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx_insn *insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
       bb->aux)
    {
      if (BB_HEADER (bb))
	{
	  if (insn)
	    SET_NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  SET_PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	SET_NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      SET_PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (BB_FOOTER (bb))
	{
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  SET_NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    SET_PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
  if (flag_checking)
    verify_insn_chain ();

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
       bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx_insn *bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_jump);
	  if (any_condjump_p (bb_end_jump))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_jump)
			      || returnjump_p (bb_end_jump)
                              || (e_taken->flags & EDGE_CROSSING));
		  emit_barrier_after (bb_end_jump);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* The degenerate case of a conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);

		  if (note
		      && XINT (note, 0) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_jump,
				      (e_fall->dest
				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_jump,
				    (e_fall->dest
				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (ret_label);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	    continue;
	}

      /* We got here if we need to add a new jump insn. 
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB_FN (bb, cfun)
      {
        edge e;
        edge_iterator ei;

        FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx_insn *end;

	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
							 nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

DEBUG_FUNCTION void
verify_insn_chain (void)
{
  rtx_insn *x, *prevx, *nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block_after_labels (bb)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      while (c->aux)
	c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}

/* In case there is more than one fallthru predecessor of the exit block,
   force there to be only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

static bool
cfg_layout_can_duplicate_bb_p (const_basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx_insn *insn = BB_HEAD (bb);
      while (1)
	{
	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
	    return false;
	  if (insn == BB_END (bb))
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  return true;
}

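/* Create a copy of the insns from FROM to TO (inclusive) at the end of the
   current insn chain, and return the first insn of the copy.  The copied
   sequence is not attached to any basic block; callers such as
   cfg_layout_duplicate_bb below turn it into a new block afterwards.  */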
rtx_insn *
duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn, *next, *copy;
  rtx_note *last;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will get removed from the insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
          maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case JUMP_TABLE_DATA:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.
	     Avoid copying following barrier as well if any
	     (and debug insns in between).  */
	  for (next = NEXT_INSN (insn);
	       next != NEXT_INSN (to);
	       next = NEXT_INSN (next))
	    if (!DEBUG_INSN_P (next))
	      break;
	  if (next != NEXT_INSN (to) && BARRIER_P (next))
	    insn = next;
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case the prologue is empty and the function contains a label
		 in its first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just a single entry to the function.  */
	    case NOTE_INSN_BASIC_BLOCK:
              /* We should only switch text sections once.  */
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
	      emit_note_copy (as_a <rtx_note *> (insn));
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx_insn *insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}


/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx_insn_list *x;
  basic_block bb;

  /* Once bb partitioning is complete, cfg layout mode should not be
     re-entered.  Entering cfg layout mode may require fixups.  As an
     example, if edge forwarding performed when optimizing the cfg
     layout required moving a block from the hot to the cold
     section. This would create an illegal partitioning unless some
     manual fixup was performed.  */
  gcc_assert (!(crtl->bb_reorder_complete
		&& flag_reorder_blocks_and_partition));

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = x->next ())
    {
      bb = BLOCK_FOR_INSN (x->insn ());
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
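
/* Example (illustrative) of the calling sequence around a pass that wants
   to work in cfglayout mode:

     cfg_layout_initialize (0);
     ... transform the CFG, recording any new block order in bb->aux ...
     cfg_layout_finalize ();

   This is what the into_cfglayout/outof_cfglayout pass pair above arranges
   on behalf of the passes scheduled between them.  */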

/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (superblocks);

  FOR_EACH_BB_FN (bb, cfun)
    if (bb->flags & BB_SUPERBLOCK)
      {
	bb->flags &= ~BB_SUPERBLOCK;
	bitmap_set_bit (superblocks, bb->index);
	need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  checking_verify_flow_info ();
  free_dominance_info (CDI_DOMINATORS);
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  if (reload_completed && !targetm.have_epilogue ())
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

  if (flag_checking)
    verify_insn_chain ();
  checking_verify_flow_info ();
}


/* Same as split_block but update cfg_layout structures.  */

static basic_block
cfg_layout_split_block (basic_block bb, void *insnp)
{
  rtx insn = (rtx) insnp;
  basic_block new_bb = rtl_split_block (bb, insn);

  BB_FOOTER (new_bb) = BB_FOOTER (bb);
  BB_FOOTER (bb) = NULL;

  return new_bb;
}

/* Redirect Edge to DEST.  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  redirected->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (redirected->src);
	  return redirected;
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      if (dump_file)
	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
		 e->src->index, e->dest->index, dest->index);
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}

/* Simple wrapper as we always can redirect fallthru edges.  */
static basic_block
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);

  gcc_assert (redirected);
  return NULL;
}

/* Same as delete_basic_block but update cfg_layout structures.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
  rtx_insn **to;

  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	SET_NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      SET_PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = next;
      SET_PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  SET_NEXT_INSN (insn) = next;
	  if (next)
	    SET_PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = *to;
      if (*to)
	SET_PREV_INSN (*to) = insn;
      *to = remaints;
    }
}

/* Return true when blocks A and B can be safely merged.  */

static bool
cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* If we would end up moving B's instructions, make sure it doesn't fall
     through into the exit block, since we cannot recover from a fallthrough
     edge into the exit block occurring in the middle of a function.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      edge e = find_fallthru_edge (b->succs);
      if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
    }

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b) == 1
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects, we can't kill the edge.
	     When not optimizing, try_redirect_by_replacing_jump will
	     not allow us to redirect an edge by replacing a table jump.  */
	  && (!JUMP_P (BB_END (a))
	      || ((!optimize || reload_completed)
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Merge block A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
			 a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have a fallthru edge in A, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  rtx_insn *last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
   if (BB_HEADER (b))
     {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  rtx_insn *last = BB_HEADER (b);
 
	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to call it explicitly.  */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}

/* Split edge E.  */

static basic_block
cfg_layout_split_edge (edge e)
{
  basic_block new_bb =
    create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			? NEXT_INSN (BB_END (e->src)) : get_insns (),
			NULL_RTX, e->src);

  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    BB_COPY_PARTITION (new_bb, e->src);
  else
    BB_COPY_PARTITION (new_bb, e->dest);
  make_edge (new_bb, e->dest, EDGE_FALLTHRU);
  redirect_edge_and_branch_force (e, new_bb);

  return new_bb;
}

/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}

/* Return true if BB contains only labels or non-executable
   instructions.  */

static bool
rtl_block_empty_p (basic_block bb)
{
  rtx_insn *insn;

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;

  FOR_BB_INSNS (bb, insn)
    if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
      return false;

  return true;
}

/* Split a basic block if it ends with a conditional branch and if
   the other part of the block is not empty.  */

static basic_block
rtl_split_block_before_cond_jump (basic_block bb)
{
  rtx_insn *insn;
  rtx_insn *split_point = NULL;
  rtx_insn *last = NULL;
  bool found_code = false;

  FOR_BB_INSNS (bb, insn)
    {
      if (any_condjump_p (insn))
	split_point = last;
      else if (NONDEBUG_INSN_P (insn))
	found_code = true;
      last = insn;
    }

  /* Split only if we found both a conditional jump and some other code.  */
  if (found_code && split_point)
    return split_block (bb, split_point)->dest;
  else 
    return NULL;
}

/* Return 1 if BB ends with a call, possibly followed by some
   instructions that must stay with the call, 0 otherwise.  */

static bool
rtl_block_ends_with_call_p (basic_block bb)
{
  rtx_insn *insn = BB_END (bb);

  while (!CALL_P (insn)
	 && insn != BB_HEAD (bb)
	 && (keep_with_call_p (insn)
	     || NOTE_P (insn)
	     || DEBUG_INSN_P (insn)))
    insn = PREV_INSN (insn);
  return (CALL_P (insn));
}

/* Return 1 if BB ends with a conditional branch, 0 otherwise.  */

static bool
rtl_block_ends_with_condjump_p (const_basic_block bb)
{
  return any_condjump_p (BB_END (bb));
}

/* Return true if we need to add fake edge to exit.
   Helper function for rtl_flow_call_edges_add.  */

static bool
need_fake_edge_p (const rtx_insn *insn)
{
  if (!INSN_P (insn))
    return false;

  if ((CALL_P (insn)
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !(RTL_CONST_OR_PURE_CALL_P (insn))))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	   && MEM_VOLATILE_P (PATTERN (insn)))
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && asm_noperands (insn) != -1
	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and non-noreturn
   calls, or volatile inline assembly, in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}

/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target.  SECOND_HEAD would be the fall-thru
   target, but there is no need to handle it here; the loop versioning
   code takes care of that.  The reason SECOND_HEAD is still a parameter
   is that it is needed for the condition on trees, and this hook should
   have the same type for both IRs.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx_code_label *label;
  rtx_insn *seq, *jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  machine_mode mode;


  label = block_label (first_head);
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label, -1);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new cond, in the new head.  */
  emit_insn_after (seq, BB_END (cond_bb));
}


/* Given a block B ending in a conditional branch, store the branch
   edge and the fall-thru edge in BRANCH_EDGE and FALLTHRU_EDGE
   respectively.  */
static void
rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
			   edge *fallthru_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_FALLTHRU)
    {
      *fallthru_edge = e;
      *branch_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *branch_edge = e;
      *fallthru_edge = EDGE_SUCC (b, 1);
    }
}

void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
}

/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
rtl_can_remove_branch_p (const_edge e)
{
  const_basic_block src = e->src;
  const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
  const rtx_insn *insn = BB_END (src);
  rtx set;

  /* The conditions are taken from try_redirect_by_replacing_jump.  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return false;

  if (!onlyjump_p (insn)
      || tablejump_p (insn, NULL, NULL))
    return false;

  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  return true;
}

static basic_block
rtl_duplicate_bb (basic_block bb)
{
  bb = cfg_layout_duplicate_bb (bb);
  bb->aux = NULL;
  return bb;
}

/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
   then do post-pass accounting.  Store the counting in RECORD.  */
static void
rtl_account_profile_record (basic_block bb, int after_pass,
			    struct profile_record *record)
{
  rtx_insn *insn;
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	record->size[after_pass]
	  += insn_rtx_cost (PATTERN (insn), false);
	if (profile_status_for_fn (cfun) == PROFILE_READ)
	  record->time[after_pass]
	    += insn_rtx_cost (PATTERN (insn), true) * bb->count;
	else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	  record->time[after_pass]
	    += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
      }
}

/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};

/* Implementation of CFG manipulation for cfg layout RTL, where
   basic blocks connected via fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in a future
   version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */  
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};

#include "gt-cfgrtl.h"