/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "except.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "timevar.h"
#include "sbitmap.h"

43
static void make_edges (basic_block, basic_block, int);
44
static void make_label_edge (sbitmap, basic_block, rtx, int);
45 46
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);
47

48 49 50
/* Return true if insn is something that should be contained inside basic
   block.  */

51
bool
52
inside_basic_block_p (const rtx_insn *insn)
53 54 55 56 57
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating of basic block for jumptables.  */
58
      return (NEXT_INSN (insn) == 0
59
	      || ! JUMP_TABLE_DATA_P (NEXT_INSN (insn)));
60 61 62 63

    case JUMP_INSN:
    case CALL_INSN:
    case INSN:
64
    case DEBUG_INSN:
65 66
      return true;

67
    case JUMP_TABLE_DATA:
68 69 70 71 72
    case BARRIER:
    case NOTE:
      return false;

    default:
73
      gcc_unreachable ();
74 75 76
    }
}

77 78
/* Return true if INSN may cause control flow transfer, so it should be last in
   the basic block.  */
79

80
bool
81
control_flow_insn_p (const rtx_insn *insn)
82 83 84
{
  switch (GET_CODE (insn))
    {
85 86
    case NOTE:
    case CODE_LABEL:
87
    case DEBUG_INSN:
88 89 90
      return false;

    case JUMP_INSN:
91
      return true;
92 93

    case CALL_INSN:
94 95 96 97 98 99
      /* Noreturn and sibling call instructions terminate the basic blocks
	 (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
	   || find_reg_note (insn, REG_NORETURN, 0))
	  && GET_CODE (PATTERN (insn)) != COND_EXEC)
	return true;
100

101
      /* Call insn may return to the nonlocal goto handler.  */
102 103 104
      if (can_nonlocal_goto (insn))
	return true;
      break;
105 106

    case INSN:
107 108 109 110
      /* Treat trap instructions like noreturn calls (same provision).  */
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
	  && XEXP (PATTERN (insn), 0) == const1_rtx)
	return true;
111
      if (!cfun->can_throw_non_call_exceptions)
112 113
	return false;
      break;
114

115
    case JUMP_TABLE_DATA:
116
    case BARRIER:
117
      /* It is nonsense to reach this when looking for the
Mike Stump committed
118 119
	 end of basic block, but before dead code is eliminated
	 this may happen.  */
120 121 122
      return false;

    default:
123
      gcc_unreachable ();
124
    }
125 126

  return can_throw_internal (insn);
127
}
128 129 130 131 132 133 134 135


/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
136
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
137
{
138
  gcc_assert (LABEL_P (label));
139 140 141 142 143 144 145 146 147

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

148
  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
149 150 151 152
}

/* Create the edges generated by INSN in REGION.  */

153
void
154
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
155
{
156
  eh_landing_pad lp = get_eh_landing_pad_from_rtx (insn);
157

158 159 160
  if (lp)
    {
      rtx label = lp->landing_pad;
161

162 163 164 165 166 167
      /* During initial rtl generation, use the post_landing_pad.  */
      if (label == NULL)
	{
	  gcc_assert (lp->post_landing_pad);
	  label = label_rtx (lp->post_landing_pad);
	}
168

169 170 171 172
      make_label_edge (edge_cache, src, label,
		       EDGE_ABNORMAL | EDGE_EH
		       | (CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0));
    }
173
}

/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

/* The state is stashed in each block's otherwise-unused aux field.  */
#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP		32
#define FULL_STATE(BB) ((size_t) (BB)->aux)

200 201 202
/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.
203

204 205
   UPDATE_P should be nonzero if we are updating CFG and zero if we
   are building CFG from scratch.  */
206 207

static void
208
make_edges (basic_block min, basic_block max, int update_p)
209
{
210
  basic_block bb;
211
  sbitmap edge_cache = NULL;
212 213 214 215

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
216
  if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
217
    edge_cache = sbitmap_alloc (last_basic_block_for_fn (cfun));
218

219 220
  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
     is always the entry.  */
221 222
  if (min == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), min, EDGE_FALLTHRU);
223

224
  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
225
    {
David Malcolm committed
226
      rtx_insn *insn;
227
      enum rtx_code code;
228
      edge e;
229
      edge_iterator ei;
230

231 232 233
      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;

234 235 236
      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
	{
237
	  bitmap_clear (edge_cache);
238 239 240
	  if (update_p)
	    {
	      FOR_EACH_EDGE (e, ei, bb->succs)
241
		if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
242
		  bitmap_set_bit (edge_cache, e->dest->index);
243 244 245
	    }
	}

246
      if (LABEL_P (BB_HEAD (bb))
247
	  && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
248
	cached_make_edge (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
249 250 251 252

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

253
      insn = BB_END (bb);
254 255 256 257 258 259
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;
260
	  rtx_jump_table_data *table;
261 262 263

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
264
	  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
265 266
	    ;

267
	  /* Recognize a tablejump and do the right thing.  */
268
	  else if (tablejump_p (insn, NULL, &table))
269
	    {
270
	      rtvec vec = table->get_labels ();
271 272 273 274 275 276 277 278 279 280 281 282 283 284
	      int j;

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
285
				 LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)), 0);
286 287 288
	    }

	  /* If this is a computed jump, then mark it as reaching
289
	     everything on the forced_labels list.  */
290 291
	  else if (computed_jump_p (insn))
	    {
292 293
	      for (rtx_insn_list *x = forced_labels; x; x = x->next ())
		make_label_edge (edge_cache, bb, x->insn (), EDGE_ABNORMAL);
294 295 296 297
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
298
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
299

300 301 302 303 304 305 306 307 308
	  /* Recognize asm goto and do the right thing.  */
	  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
	    {
	      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
	      for (i = 0; i < n; ++i)
		make_label_edge (edge_cache, bb,
				 XEXP (ASM_OPERANDS_LABEL (tmp, i), 0), 0);
	    }

309 310 311
	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
312
	      gcc_assert (JUMP_LABEL (insn));
313 314 315 316
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

317 318 319 320
      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
321
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
322
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
323
			  EDGE_SIBCALL | EDGE_ABNORMAL);
324 325 326 327 328

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
329
      else if (code == CALL_INSN || cfun->can_throw_non_call_exceptions)
330 331
	{
	  /* Add any appropriate EH edges.  */
332
	  rtl_make_eh_edge (edge_cache, bb, insn);
333

334
	  if (code == CALL_INSN)
335
	    {
336
	      if (can_nonlocal_goto (insn))
337 338 339 340 341 342 343 344
		{
		  /* ??? This could be made smarter: in some cases it's
		     possible to tell that certain calls will not do a
		     nonlocal goto.  For example, if the nested functions
		     that do the nonlocal gotos do not have their addresses
		     taken, then only calls to those functions or to other
		     nested functions that use them could possibly do
		     nonlocal gotos.  */
345
		  for (rtx_insn_list *x = nonlocal_goto_handler_labels;
346 347
		       x;
		       x = x->next ())
348
		    make_label_edge (edge_cache, bb, x->insn (),
349 350 351 352 353 354 355 356 357 358 359
				     EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}

	      if (flag_tm)
		{
		  rtx note;
		  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		    if (REG_NOTE_KIND (note) == REG_TM)
		      make_label_edge (edge_cache, bb, XEXP (note, 0),
				       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}
360 361 362 363
	    }
	}

      /* Find out if we can drop through to the next block.  */
364
      insn = NEXT_INSN (insn);
365
      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
366 367 368
      if (e && e->flags & EDGE_FALLTHRU)
	insn = NULL;

369
      while (insn
370
	     && NOTE_P (insn)
371
	     && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
372 373
	insn = NEXT_INSN (insn);

374
      if (!insn)
375 376 377
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
378
	{
379
	  if (insn == BB_HEAD (bb->next_bb))
380
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
381 382 383 384
	}
    }

  if (edge_cache)
385
    sbitmap_free (edge_cache);
386 387
}

388 389 390 391 392 393 394 395 396 397 398 399 400 401
static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}

static void
402
purge_dead_tablejump_edges (basic_block bb, rtx_jump_table_data *table)
403
{
David Malcolm committed
404 405
  rtx_insn *insn = BB_END (bb);
  rtx tmp;
406 407 408 409 410
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

411
  vec = table->get_labels ();
412 413 414 415 416 417 418 419 420 421 422

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (eg, ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and
     add an edge if necessary.  */
  if ((tmp = single_set (insn)) != NULL
       && SET_DEST (tmp) == pc_rtx
       && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
       && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
423
    mark_tablejump_edge (LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)));
424 425 426 427

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
Mike Stump committed
428 429
	SET_STATE (e->dest, FULL_STATE (e->dest)
			    & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
430
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
Mike Stump committed
431 432 433 434
	{
	  remove_edge (e);
	  continue;
	}
435 436 437 438
      ei_next (&ei);
    }
}

439 440 441 442
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the progress.  */

static void
443
find_bb_boundaries (basic_block bb)
444
{
445
  basic_block orig_bb = bb;
David Malcolm committed
446 447
  rtx_insn *insn = BB_HEAD (bb);
  rtx_insn *end = BB_END (bb), *x;
448
  rtx_jump_table_data *table;
David Malcolm committed
449
  rtx_insn *flow_transfer_insn = NULL;
450
  edge fallthru = NULL;
451

452
  if (insn == BB_END (bb))
453 454
    return;

455
  if (LABEL_P (insn))
456 457 458 459 460 461
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);
462

463 464 465 466
      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if ((flow_transfer_insn || code == CODE_LABEL)
	  && inside_basic_block_p (insn))
467
	{
468 469
	  fallthru = split_block (bb, PREV_INSN (insn));
	  if (flow_transfer_insn)
470
	    {
471
	      BB_END (bb) = flow_transfer_insn;
472 473 474 475 476 477 478 479

	      /* Clean up the bb field for the insns between the blocks.  */
	      for (x = NEXT_INSN (flow_transfer_insn);
		   x != BB_HEAD (fallthru->dest);
		   x = NEXT_INSN (x))
		if (!BARRIER_P (x))
		  set_block_for_insn (x, NULL);
	    }
480

481 482
	  bb = fallthru->dest;
	  remove_edge (fallthru);
David Malcolm committed
483
	  flow_transfer_insn = NULL;
484
	  if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
485
	    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
486
	}
487 488 489 490 491 492 493 494 495
      else if (code == BARRIER)
	{
	  /* __builtin_unreachable () may cause a barrier to be emitted in
	     the middle of a BB.  We need to split it in the same manner as
	     if the barrier were preceded by a control_flow_insn_p insn.  */
	  if (!flow_transfer_insn)
	    flow_transfer_insn = prev_nonnote_insn_bb (insn);
	}

496 497
      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
498 499 500 501 502 503 504 505
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case expander replaced normal insn by sequence terminating by
     return and barrier, or possibly other sequence not behaving like
     ordinary jump, we need to take care and move basic block boundary.  */
506
  if (flow_transfer_insn)
507
    {
508
      BB_END (bb) = flow_transfer_insn;
509 510 511 512 513 514 515 516 517 518

      /* Clean up the bb field for the insns that do not belong to BB.  */
      x = flow_transfer_insn;
      while (x != end)
	{
	  x = NEXT_INSN (x);
	  if (!BARRIER_P (x))
	    set_block_for_insn (x, NULL);
	}
    }
519 520 521 522 523

  /* We've possibly replaced the conditional jump by conditional jump
     followed by cleanup at fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);
524 525 526 527 528

  /* purge_dead_edges doesn't handle tablejump's, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
529 530 531 532 533 534
}

/*  Assume that frequency of basic block B is known.  Compute frequencies
    and probabilities of outgoing edges.  */

static void
535
compute_outgoing_frequencies (basic_block b)
536 537
{
  edge e, f;
538
  edge_iterator ei;
539

540
  if (EDGE_COUNT (b->succs) == 2)
541
    {
542
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
543 544
      int probability;

545 546
      if (note)
	{
547
	  probability = XINT (note, 0);
548 549
	  e = BRANCH_EDGE (b);
	  e->probability = probability;
550
	  e->count = apply_probability (b->count, probability);
551 552 553 554 555
	  f = FALLTHRU_EDGE (b);
	  f->probability = REG_BR_PROB_BASE - probability;
	  f->count = b->count - e->count;
	  return;
	}
Easwaran Raman committed
556 557 558 559
      else
        {
          guess_outgoing_edge_probabilities (b);
        }
560
    }
Easwaran Raman committed
561
  else if (single_succ_p (b))
562
    {
563
      e = single_succ_edge (b);
564 565
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
566
      return;
567
    }
Easwaran Raman committed
568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584
  else
    {
      /* We rely on BBs with more than two successors to have sane probabilities
         and do not guess them here. For BBs terminated by switch statements
         expanded to jump-table jump, we have done the right thing during
         expansion. For EH edges, we still guess the probabilities here.  */
      bool complex_edge = false;
      FOR_EACH_EDGE (e, ei, b->succs)
        if (e->flags & EDGE_COMPLEX)
          {
            complex_edge = true;
            break;
          }
      if (complex_edge)
        guess_outgoing_edge_probabilities (b);
    }

585
  if (b->count)
586
    FOR_EACH_EDGE (e, ei, b->succs)
587
      e->count = apply_probability (b->count, e->probability);
588 589
}

590 591 592
/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */
593 594

void
595
find_many_sub_basic_blocks (sbitmap blocks)
596
{
597
  basic_block bb, min, max;
598

599
  FOR_EACH_BB_FN (bb, cfun)
600
    SET_STATE (bb,
601
	       bitmap_bit_p (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
602

603
  FOR_EACH_BB_FN (bb, cfun)
604 605
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);
606

607
  FOR_EACH_BB_FN (bb, cfun)
608
    if (STATE (bb) != BLOCK_ORIGINAL)
609
      break;
610

611
  min = max = bb;
612
  for (; bb != EXIT_BLOCK_PTR_FOR_FN (cfun); bb = bb->next_bb)
613 614
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;
615 616 617

  /* Now re-scan and wire in all edges.  This expect simple (conditional)
     jumps at the end of each new basic blocks.  */
618
  make_edges (min, max, 1);
619 620 621

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
622
  if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
623 624 625
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
	edge e;
626
	edge_iterator ei;
627 628 629 630 631 632 633

	if (STATE (bb) == BLOCK_ORIGINAL)
	  continue;
	if (STATE (bb) == BLOCK_NEW)
	  {
	    bb->count = 0;
	    bb->frequency = 0;
634
	    FOR_EACH_EDGE (e, ei, bb->preds)
635 636 637 638 639
	      {
		bb->count += e->count;
		bb->frequency += EDGE_FREQUENCY (e);
	      }
	  }
640

641 642
	compute_outgoing_frequencies (bb);
      }
643

644
  FOR_EACH_BB_FN (bb, cfun)
645
    SET_STATE (bb, 0);
646
}