/* DDG - Data Dependence Graph implementation.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "except.h"
#include "recog.h"
#include "predict.h"
#include "basic-block.h"
#include "sched-int.h"
#include "target.h"
#include "cfgloop.h"
#include "sbitmap.h"
#include "symtab.h"
#include "statistics.h"
#include "double-int.h"
#include "real.h"
#include "fixed-value.h"
#include "alias.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "bitmap.h"
#include "df.h"
#include "ddg.h"
#include "rtl-iter.h"

#ifdef INSN_SCHEDULING

/* A flag indicating whether a ddg edge belongs to an SCC.  */
enum edge_flag {NOT_IN_SCC = 0, IN_SCC};

/* Forward declarations.  */
static void add_backarc_to_ddg (ddg_ptr, ddg_edge_ptr);
static void add_backarc_to_scc (ddg_scc_ptr, ddg_edge_ptr);
static void add_scc_to_ddg (ddg_all_sccs_ptr, ddg_scc_ptr);
static void create_ddg_dep_from_intra_loop_link (ddg_ptr, ddg_node_ptr,
                                                 ddg_node_ptr, dep_t);
static void create_ddg_dep_no_link (ddg_ptr, ddg_node_ptr, ddg_node_ptr,
 				    dep_type, dep_data_type, int);
static ddg_edge_ptr create_ddg_edge (ddg_node_ptr, ddg_node_ptr, dep_type,
				     dep_data_type, int, int);
static void add_edge_to_ddg (ddg_ptr g, ddg_edge_ptr);

/* Auxiliary variable for mem_read_insn_p/mem_write_insn_p.  */
static bool mem_ref_p;

/* Auxiliary function for mem_read_insn_p.  */
static void
mark_mem_use (rtx *x, void *)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *x, NONCONST)
    if (MEM_P (*iter))
      {
	mem_ref_p = true;
	break;
      }
}

/* Returns nonzero if INSN reads from memory.  */
static bool
mem_read_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_uses (&PATTERN (insn), mark_mem_use, NULL);
  return mem_ref_p;
}

static void
mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (loc))
    mem_ref_p = true;
}

/* Returns nonzero if INSN writes to memory.  */
static bool
mem_write_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_stores (PATTERN (insn), mark_mem_store, NULL);
  return mem_ref_p;
}

/* Returns nonzero if X accesses memory.  */
static bool
rtx_mem_access_p (rtx x)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  if (x == 0)
    return false;

  if (MEM_P (x))
    return true;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (rtx_mem_access_p (XEXP (x, i)))
            return true;
        }
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    if (rtx_mem_access_p (XVECEXP (x, i, j)))
              return true;
          }
    }
  return false;
}

/* Returns nonzero if INSN reads from or writes to memory.  */
static bool
mem_access_insn_p (rtx_insn *insn)
{
  return rtx_mem_access_p (PATTERN (insn));
}

/* Return true if DEF_INSN contains an address that is auto-incremented or
   auto-decremented and used in USE_INSN.  Otherwise return false.  The
   result is used to decide whether to remove the edge between def_insn and
   use_insn when -fmodulo-sched-allow-regmoves is set.  This function
   doesn't need to consider the specific address register; no reg_moves
   will be allowed for any life range defined by def_insn and used
   by use_insn, if use_insn uses an address register auto-inc'ed by
   def_insn.  */
bool
autoinc_var_is_used_p (rtx_insn *def_insn, rtx_insn *use_insn)
{
  rtx note;

  for (note = REG_NOTES (def_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC
	&& reg_referenced_p (XEXP (note, 0), PATTERN (use_insn)))
      return true;

  return false;
}

/* Return true if one of the definitions in INSN has MODE_CC.  Otherwise
   return false.  */
static bool
def_has_ccmode_p (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    {
      machine_mode mode = GET_MODE (DF_REF_REG (def));

      if (GET_MODE_CLASS (mode) == MODE_CC)
	return true;
    }

  return false;
}

/* Computes the dependence parameters (latency, distance etc.), creates
   a ddg_edge and adds it to the given DDG.  */
static void
create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
                                     ddg_node_ptr dest_node, dep_t link)
{
  ddg_edge_ptr e;
  int latency, distance = 0;
  dep_type t = TRUE_DEP;
  dep_data_type dt = (mem_access_insn_p (src_node->insn)
		      && mem_access_insn_p (dest_node->insn) ? MEM_DEP
							     : REG_DEP);
  gcc_assert (src_node->cuid < dest_node->cuid);
  gcc_assert (link);

  /* Note: REG_DEP_ANTI applies to MEM ANTI_DEP as well!!  */
  if (DEP_TYPE (link) == REG_DEP_ANTI)
    t = ANTI_DEP;
  else if (DEP_TYPE (link) == REG_DEP_OUTPUT)
    t = OUTPUT_DEP;

  gcc_assert (!DEBUG_INSN_P (dest_node->insn) || t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (src_node->insn) || t == ANTI_DEP);

  /* We currently choose not to create certain anti-deps edges and
     compensate for that by generating reg-moves based on the life-range
     analysis.  The anti-deps that will be deleted are the ones which
     have true-deps edges in the opposite direction (in other words
     the kernel has only one def of the relevant register).
     If the address that is being auto-inc or auto-dec in DEST_NODE
     is used in SRC_NODE then do not remove the edge to make sure
     reg-moves will not be created for this address.  
     TODO: support the removal of all anti-deps edges, i.e. including those
     whose register has multiple defs in the loop.  */
  if (flag_modulo_sched_allow_regmoves 
      && (t == ANTI_DEP && dt == REG_DEP)
      && !def_has_ccmode_p (dest_node->insn)
      && !autoinc_var_is_used_p (dest_node->insn, src_node->insn))
    {
      rtx set;

      set = single_set (dest_node->insn);
      /* TODO: Handle registers that REG_P is not true for them, i.e.
         subregs and special registers.  */
      if (set && REG_P (SET_DEST (set)))
        {
          int regno = REGNO (SET_DEST (set));
          df_ref first_def;
          struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

          first_def = df_bb_regno_first_def_find (g->bb, regno);
          gcc_assert (first_def);

          if (bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)))
            return;
        }
    }

  latency = dep_cost (link);
  e = create_ddg_edge (src_node, dest_node, t, dt, latency, distance);
  add_edge_to_ddg (g, e);
}

/* The same as the above function, but it doesn't require a link parameter.  */
static void
create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
			dep_type d_t, dep_data_type d_dt, int distance)
{
  ddg_edge_ptr e;
  int l;
  enum reg_note dep_kind;
  struct _dep _dep, *dep = &_dep;

  gcc_assert (!DEBUG_INSN_P (to->insn) || d_t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (from->insn) || d_t == ANTI_DEP);

  if (d_t == ANTI_DEP)
    dep_kind = REG_DEP_ANTI;
  else if (d_t == OUTPUT_DEP)
    dep_kind = REG_DEP_OUTPUT;
  else
    {
      gcc_assert (d_t == TRUE_DEP);

      dep_kind = REG_DEP_TRUE;
    }

  init_dep (dep, from->insn, to->insn, dep_kind);

  l = dep_cost (dep);

  e = create_ddg_edge (from, to, d_t, d_dt, l, distance);
  if (distance > 0)
    add_backarc_to_ddg (g, e);
  else
    add_edge_to_ddg (g, e);
}


/* Given a downwards exposed register def LAST_DEF (which is the last
   definition of that register in the bb), add inter-loop true dependences
   to all its uses in the next iteration, an output dependence to the
   first def of the same register (possibly itself) in the next iteration
   and anti-dependences from its uses in the current iteration to the
   first definition in the next iteration.  */
static void
add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
  int regno = DF_REF_REGNO (last_def);
  struct df_link *r_use;
  int has_use_in_bb_p = false;
  rtx_insn *def_insn = DF_REF_INSN (last_def);
  ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn);
  ddg_node_ptr use_node;
#ifdef ENABLE_CHECKING
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
  df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);

  gcc_assert (last_def_node);
  gcc_assert (first_def);

#ifdef ENABLE_CHECKING
  if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
    gcc_assert (!bitmap_bit_p (&bb_info->gen,
			       DF_REF_ID (first_def)));
#endif

  /* Create inter-loop true dependences and anti dependences.  */
  for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next)
    {
      rtx_insn *use_insn = DF_REF_INSN (r_use->ref);

      if (BLOCK_FOR_INSN (use_insn) != g->bb)
	continue;

      /* ??? Do not handle uses with DF_REF_IN_NOTE notes.  */
      use_node = get_node_of_insn (g, use_insn);
      gcc_assert (use_node);
      has_use_in_bb_p = true;
      if (use_node->cuid <= last_def_node->cuid)
	{
	  /* Add true deps from last_def to its uses in the next
	     iteration.  Any such upwards exposed use appears before
	     the last_def def.  */
	  create_ddg_dep_no_link (g, last_def_node, use_node,
				  DEBUG_INSN_P (use_insn) ? ANTI_DEP : TRUE_DEP,
				  REG_DEP, 1);
	}
      else if (!DEBUG_INSN_P (use_insn))
	{
	  /* Add anti deps from last_def's uses in the current iteration
	     to the first def in the next iteration.  We do not add ANTI
	     dep when there is an intra-loop TRUE dep in the opposite
	     direction, but use regmoves to fix such disregarded ANTI
	     deps when broken.	If the first_def reaches the USE then
	     there is such a dep.  */
	  ddg_node_ptr first_def_node = get_node_of_insn (g,
							  DF_REF_INSN (first_def));

	  gcc_assert (first_def_node);

         /* Always create the edge if the use node is a branch in
            order to prevent the creation of reg-moves.  
            If the address that is being auto-inc or auto-dec in LAST_DEF
            is used in USE_INSN then do not remove the edge to make sure
            reg-moves will not be created for that address.  */
          if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
              || !flag_modulo_sched_allow_regmoves
	      || JUMP_P (use_node->insn)
              || autoinc_var_is_used_p (DF_REF_INSN (last_def), use_insn)
	      || def_has_ccmode_p (DF_REF_INSN (last_def)))
            create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
                                    REG_DEP, 1);

	}
    }
  /* Create an inter-loop output dependence between LAST_DEF (which is the
     last def in its block, being downwards exposed) and the first def in
     its block.  Avoid creating a self output dependence.  Avoid creating
     an output dependence if there is a dependence path between the two
     defs starting with a true dependence to a use which can be in the
     next iteration; followed by an anti dependence of that use to the
     first def (i.e. if there is a use between the two defs.)  */
  if (!has_use_in_bb_p)
    {
      ddg_node_ptr dest_node;

      if (DF_REF_ID (last_def) == DF_REF_ID (first_def))
	return;

      dest_node = get_node_of_insn (g, DF_REF_INSN (first_def));
      gcc_assert (dest_node);
      create_ddg_dep_no_link (g, last_def_node, dest_node,
			      OUTPUT_DEP, REG_DEP, 1);
    }
}
/* Build inter-loop dependencies, by looking at DF analysis backwards.  */
static void
build_inter_loop_deps (ddg_ptr g)
{
  unsigned rd_num;
  struct df_rd_bb_info *rd_bb_info;
  bitmap_iterator bi;

  rd_bb_info = DF_RD_BB_INFO (g->bb);

  /* Find inter-loop register output, true and anti deps.  */
  EXECUTE_IF_SET_IN_BITMAP (&rd_bb_info->gen, 0, rd_num, bi)
  {
    df_ref rd = DF_DEFS_GET (rd_num);

    add_cross_iteration_register_deps (g, rd);
  }
}


/* Return true if the two specified instructions have memory references
   that may alias each other.  */
static bool
insns_may_alias_p (rtx_insn *insn1, rtx_insn *insn2)
{
  subrtx_iterator::array_type array1;
  subrtx_iterator::array_type array2;
  FOR_EACH_SUBRTX (iter1, array1, PATTERN (insn1), NONCONST)
    {
      const_rtx x1 = *iter1;
      if (MEM_P (x1))
	FOR_EACH_SUBRTX (iter2, array2, PATTERN (insn2), NONCONST)
	  {
	    const_rtx x2 = *iter2;
	    if (MEM_P (x2) && may_alias_p (x2, x1))
	      return true;
	  }
    }
  return false;
}

/* Given two nodes, analyze their RTL insns and add intra-loop mem deps
   to ddg G.  */
static void
add_intra_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{

  if ((from->cuid == to->cuid)
      || !insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets
       or 'to' and 'from' are the same instruction.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
	create_ddg_dep_no_link (g, from, to,
				DEBUG_INSN_P (to->insn)
				? ANTI_DEP : TRUE_DEP, MEM_DEP, 0);
      else
	create_ddg_dep_no_link (g, from, to,
				DEBUG_INSN_P (to->insn)
				? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 0);
    }
  else if (!mem_read_insn_p (to->insn))
    create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 0);
}

/* Given two nodes, analyze their RTL insns and add inter-loop mem deps
   to ddg G.  */
static void
add_inter_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if (!insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
  	create_ddg_dep_no_link (g, from, to,
				DEBUG_INSN_P (to->insn)
				? ANTI_DEP : TRUE_DEP, MEM_DEP, 1);
      else if (from->cuid != to->cuid)
  	create_ddg_dep_no_link (g, from, to,
				DEBUG_INSN_P (to->insn)
				? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 1);
    }
  else
    {
      if (mem_read_insn_p (to->insn))
	return;
      else if (from->cuid != to->cuid)
	{
	  create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 1);
	  if (DEBUG_INSN_P (from->insn) || DEBUG_INSN_P (to->insn))
	    create_ddg_dep_no_link (g, to, from, ANTI_DEP, MEM_DEP, 1);
	  else
	    create_ddg_dep_no_link (g, to, from, TRUE_DEP, MEM_DEP, 1);
	}
    }

}

/* Perform intra-block Data Dependency analysis and connect the nodes in
   the DDG.  We assume the loop has a single basic block.  */
static void
build_intra_loop_deps (ddg_ptr g)
{
  int i;
  /* Hold the dependency analysis state during dependency calculations.  */
  struct deps_desc tmp_deps;
  rtx_insn *head, *tail;

  /* Build the dependence information, using the sched_analyze function.  */
  init_deps_global ();
  init_deps (&tmp_deps, false);

  /* Do the intra-block data dependence analysis for the given block.  */
  get_ebb_head_tail (g->bb, g->bb, &head, &tail);
  sched_analyze (&tmp_deps, head, tail);

  /* Build intra-loop data dependencies using the scheduler dependency
     analysis.  */
  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_node_ptr dest_node = &g->nodes[i];
      sd_iterator_def sd_it;
      dep_t dep;

      if (! INSN_P (dest_node->insn))
	continue;

      FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep)
	{
	  rtx_insn *src_insn = DEP_PRO (dep);
	  ddg_node_ptr src_node;

	  /* Don't add dependencies on debug insns to non-debug insns
	     to avoid codegen differences between -g and -g0.  */
	  if (DEBUG_INSN_P (src_insn) && !DEBUG_INSN_P (dest_node->insn))
	    continue;

	  src_node = get_node_of_insn (g, src_insn);

	  if (!src_node)
	    continue;

	  create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep);
	}

      /* If this insn modifies memory, add an edge to all insns that access
	 memory.  */
      if (mem_access_insn_p (dest_node->insn))
	{
	  int j;

	  for (j = 0; j <= i; j++)
	    {
	      ddg_node_ptr j_node = &g->nodes[j];
	      if (DEBUG_INSN_P (j_node->insn))
		continue;
	      if (mem_access_insn_p (j_node->insn))
		{
		  /* Don't bother calculating inter-loop dep if an intra-loop dep
		     already exists.  */
	      	  if (! bitmap_bit_p (dest_node->successors, j))
		    add_inter_loop_mem_dep (g, dest_node, j_node);
		  /* If -fmodulo-sched-allow-regmoves
		     is set certain anti-dep edges are not created.
		     It might be that these anti-dep edges are on the
		     path from one memory instruction to another such that
		     removing these edges could cause a violation of the
		     memory dependencies.  Thus we add intra edges between
		     every two memory instructions in this case.  */
		  if (flag_modulo_sched_allow_regmoves
		      && !bitmap_bit_p (dest_node->predecessors, j))
		    add_intra_loop_mem_dep (g, j_node, dest_node);
		}
            }
        }
    }

  /* Free the INSN_LISTs.  */
  finish_deps_global ();
  free_deps (&tmp_deps);

  /* Free dependencies.  */
  sched_free_deps (head, tail, false);
}


/* Given a basic block, create its DDG and return a pointer to a variable
   of ddg type that represents it.
   Initialize the ddg structure fields to the appropriate values.  */
ddg_ptr
create_ddg (basic_block bb, int closing_branch_deps)
{
  ddg_ptr g;
  rtx_insn *insn, *first_note;
  int i;
  int num_nodes = 0;

  g = (ddg_ptr) xcalloc (1, sizeof (struct ddg));

  g->bb = bb;
  g->closing_branch_deps = closing_branch_deps;

  /* Count the number of insns in the BB.  */
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn) || GET_CODE (PATTERN (insn)) == USE)
	continue;

      if (DEBUG_INSN_P (insn))
	g->num_debug++;
      else
	{
	  if (mem_read_insn_p (insn))
	    g->num_loads++;
	  if (mem_write_insn_p (insn))
	    g->num_stores++;
	}
      num_nodes++;
    }

  /* There is nothing to do for this BB.  */
  if ((num_nodes - g->num_debug) <= 1)
    {
      free (g);
      return NULL;
    }

  /* Allocate the nodes array, and initialize the nodes.  */
  g->num_nodes = num_nodes;
  g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node));
  g->closing_branch = NULL;
  i = 0;
  first_note = NULL;
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	{
	  if (! first_note && NOTE_P (insn)
	      && NOTE_KIND (insn) !=  NOTE_INSN_BASIC_BLOCK)
	    first_note = insn;
	  continue;
	}
      if (JUMP_P (insn))
	{
	  gcc_assert (!g->closing_branch);
	  g->closing_branch = &g->nodes[i];
	}
      else if (GET_CODE (PATTERN (insn)) == USE)
	{
	  if (! first_note)
	    first_note = insn;
	  continue;
	}

      g->nodes[i].cuid = i;
      g->nodes[i].successors = sbitmap_alloc (num_nodes);
      bitmap_clear (g->nodes[i].successors);
      g->nodes[i].predecessors = sbitmap_alloc (num_nodes);
      bitmap_clear (g->nodes[i].predecessors);
      g->nodes[i].first_note = (first_note ? first_note : insn);
      g->nodes[i++].insn = insn;
      first_note = NULL;
    }

  /* We must have found a branch in DDG.  */
  gcc_assert (g->closing_branch);


  /* Build the data dependency graph.  */
  build_intra_loop_deps (g);
  build_inter_loop_deps (g);
  return g;
}

/* Free all the memory allocated for the DDG.  */
void
free_ddg (ddg_ptr g)
{
  int i;

  if (!g)
    return;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e = g->nodes[i].out;

      while (e)
	{
	  ddg_edge_ptr next = e->next_out;

	  free (e);
	  e = next;
	}
      sbitmap_free (g->nodes[i].successors);
      sbitmap_free (g->nodes[i].predecessors);
    }
  if (g->num_backarcs > 0)
    free (g->backarcs);
  free (g->nodes);
  free (g);
}

void
print_ddg_edge (FILE *file, ddg_edge_ptr e)
{
  char dep_c;

  switch (e->type)
    {
    case OUTPUT_DEP :
      dep_c = 'O';
      break;
    case ANTI_DEP :
      dep_c = 'A';
      break;
    default:
      dep_c = 'T';
730
    }
731

732
  fprintf (file, " [%d -(%c,%d,%d)-> %d] ", INSN_UID (e->src->insn),
733 734 735 736 737
	   dep_c, e->latency, e->distance, INSN_UID (e->dest->insn));
}

/* Print the DDG nodes with their in/out edges to the dump file.  */
void
print_ddg (FILE *file, ddg_ptr g)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e;

      fprintf (file, "Node num: %d\n", g->nodes[i].cuid);
      print_rtl_single (file, g->nodes[i].insn);
      fprintf (file, "OUT ARCS: ");
      for (e = g->nodes[i].out; e; e = e->next_out)
	print_ddg_edge (file, e);

      fprintf (file, "\nIN ARCS: ");
      for (e = g->nodes[i].in; e; e = e->next_in)
	print_ddg_edge (file, e);

      fprintf (file, "\n");
    }
}

/* Print the given DDG in VCG format.  */
DEBUG_FUNCTION void
vcg_print_ddg (FILE *file, ddg_ptr g)
{
  int src_cuid;

  fprintf (file, "graph: {\n");
  for (src_cuid = 0; src_cuid < g->num_nodes; src_cuid++)
    {
      ddg_edge_ptr e;
      int src_uid = INSN_UID (g->nodes[src_cuid].insn);

      fprintf (file, "node: {title: \"%d_%d\" info1: \"", src_cuid, src_uid);
      print_rtl_single (file, g->nodes[src_cuid].insn);
      fprintf (file, "\"}\n");
775 776 777 778 779 780 781
      for (e = g->nodes[src_cuid].out; e; e = e->next_out)
	{
	  int dst_uid = INSN_UID (e->dest->insn);
	  int dst_cuid = e->dest->cuid;

	  /* Give the backarcs a different color.  */
	  if (e->distance > 0)
782
	    fprintf (file, "backedge: {color: red ");
783
	  else
784
	    fprintf (file, "edge: { ");
785

786 787 788
	  fprintf (file, "sourcename: \"%d_%d\" ", src_cuid, src_uid);
	  fprintf (file, "targetname: \"%d_%d\" ", dst_cuid, dst_uid);
	  fprintf (file, "label: \"%d_%d\"}\n", e->latency, e->distance);
789 790
	}
    }
791
  fprintf (file, "}\n");
792 793
}

794 795 796 797 798 799 800 801 802 803 804 805 806 807 808
/* Dump the sccs in SCCS.  */
void
print_sccs (FILE *file, ddg_all_sccs_ptr sccs, ddg_ptr g)
{
  unsigned int u = 0;
  sbitmap_iterator sbi;
  int i;

  if (!file)
    return;

  fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      fprintf (file, "SCC number: %d\n", i);
809
      EXECUTE_IF_SET_IN_BITMAP (sccs->sccs[i]->nodes, 0, u, sbi)
810 811 812 813 814 815 816 817
      {
        fprintf (file, "insn num %d\n", u);
        print_rtl_single (file, g->nodes[u].insn);
      }
    }
  fprintf (file, "\n");
}

/* Create an edge and initialize it with given values.  */
static ddg_edge_ptr
create_ddg_edge (ddg_node_ptr src, ddg_node_ptr dest,
		 dep_type t, dep_data_type dt, int l, int d)
{
  ddg_edge_ptr e = (ddg_edge_ptr) xmalloc (sizeof (struct ddg_edge));

  e->src = src;
  e->dest = dest;
  e->type = t;
  e->data_type = dt;
  e->latency = l;
  e->distance = d;
  e->next_in = e->next_out = NULL;
  e->aux.info = 0;
  return e;
}

/* Add the given edge to the in/out linked lists of the DDG nodes.  */
static void
add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED, ddg_edge_ptr e)
{
  ddg_node_ptr src = e->src;
  ddg_node_ptr dest = e->dest;

  /* Should have allocated the sbitmaps.  */
  gcc_assert (src->successors && dest->predecessors);

  bitmap_set_bit (src->successors, dest->cuid);
  bitmap_set_bit (dest->predecessors, src->cuid);
  e->next_in = dest->in;
  dest->in = e;
  e->next_out = src->out;
  src->out = e;
}



/* Algorithm for computing the recurrence_length of an scc.  We assume
   for now that cycles in the data dependence graph contain a single backarc.
   This simplifies the algorithm, and can be generalized later.  */
static void
set_recurrence_length (ddg_scc_ptr scc, ddg_ptr g)
{
  int j;
  int result = -1;

  for (j = 0; j < scc->num_backarcs; j++)
    {
      ddg_edge_ptr backarc = scc->backarcs[j];
      int length;
      int distance = backarc->distance;
      ddg_node_ptr src = backarc->dest;
      ddg_node_ptr dest = backarc->src;

      length = longest_simple_path (g, src->cuid, dest->cuid, scc->nodes);
      if (length < 0 )
	{
	  /* fprintf (stderr, "Backarc not on simple cycle in SCC.\n"); */
	  continue;
	}
      length += backarc->latency;
      result = MAX (result, (length / distance));
    }
  scc->recurrence_length = result;
}

/* Create a new SCC given the set of its nodes.  Compute its recurrence_length
   and mark edges that belong to this scc as IN_SCC.  */
static ddg_scc_ptr
create_scc (ddg_ptr g, sbitmap nodes)
{
  ddg_scc_ptr scc;
  unsigned int u = 0;
  sbitmap_iterator sbi;

  scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc));
  scc->backarcs = NULL;
  scc->num_backarcs = 0;
  scc->nodes = sbitmap_alloc (g->num_nodes);
  bitmap_copy (scc->nodes, nodes);

  /* Mark the edges that belong to this SCC and collect its backarcs.  */
  EXECUTE_IF_SET_IN_BITMAP (nodes, 0, u, sbi)
    {
      ddg_edge_ptr e;
      ddg_node_ptr n = &g->nodes[u];

      for (e = n->out; e; e = e->next_out)
	if (bitmap_bit_p (nodes, e->dest->cuid))
	  {
	    e->aux.count = IN_SCC;
	    if (e->distance > 0)
	      add_backarc_to_scc (scc, e);
	  }
    }

  set_recurrence_length (scc, g);
  return scc;
}

/* Free the memory allocated for a given SCC.  */
static void
free_scc (ddg_scc_ptr scc)
{
  if (!scc)
    return;

  sbitmap_free (scc->nodes);
  if (scc->num_backarcs > 0)
    free (scc->backarcs);
  free (scc);
}


/* Add a given edge known to be a backarc to the given DDG.  */
static void
add_backarc_to_ddg (ddg_ptr g, ddg_edge_ptr e)
{
  int size = (g->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  add_edge_to_ddg (g, e);
  g->backarcs = (ddg_edge_ptr *) xrealloc (g->backarcs, size);
  g->backarcs[g->num_backarcs++] = e;
}

/* Add backarc to an SCC.  */
static void
add_backarc_to_scc (ddg_scc_ptr scc, ddg_edge_ptr e)
{
  int size = (scc->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  scc->backarcs = (ddg_edge_ptr *) xrealloc (scc->backarcs, size);
  scc->backarcs[scc->num_backarcs++] = e;
}

/* Add the given SCC to the DDG.  */
static void
add_scc_to_ddg (ddg_all_sccs_ptr g, ddg_scc_ptr scc)
{
  int size = (g->num_sccs + 1) * sizeof (ddg_scc_ptr);

  g->sccs = (ddg_scc_ptr *) xrealloc (g->sccs, size);
  g->sccs[g->num_sccs++] = scc;
}

/* Given the instruction INSN return the node that represents it.  */
ddg_node_ptr
get_node_of_insn (ddg_ptr g, rtx_insn *insn)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    if (insn == g->nodes[i].insn)
      return &g->nodes[i];
  return NULL;
}

/* Given a set OPS of nodes in the DDG, find the set of their successors
   which are not in OPS, and set their bits in SUCC.  Bits corresponding to
   OPS are cleared from SUCC.  Leaves the other bits in SUCC unchanged.  */
void
find_successors (sbitmap succ, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]);
      bitmap_ior (succ, succ, node_succ);
    }

  /* We want those that are not in ops.  */
  bitmap_and_compl (succ, succ, ops);
}

/* Given a set OPS of nodes in the DDG, find the set of their predecessors
   which are not in OPS, and set their bits in PREDS.  Bits corresponding to
   OPS are cleared from PREDS.  Leaves the other bits in PREDS unchanged.  */
void
find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]);
      bitmap_ior (preds, preds, node_preds);
    }

  /* We want those that are not in ops.  */
  bitmap_and_compl (preds, preds, ops);
}


/* Compare function to be passed to qsort to order the backarcs in descending
   recMII order.  */
static int
compare_sccs (const void *s1, const void *s2)
{
  const int rec_l1 = (*(const ddg_scc_ptr *)s1)->recurrence_length;
  const int rec_l2 = (*(const ddg_scc_ptr *)s2)->recurrence_length;
  return ((rec_l2 > rec_l1) - (rec_l2 < rec_l1));

}

/* Order the backarcs in descending recMII order using compare_sccs.  */
static void
order_sccs (ddg_all_sccs_ptr g)
{
  qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr),
	 (int (*) (const void *, const void *)) compare_sccs);
}

#ifdef ENABLE_CHECKING
/* Check that every node in SCCS belongs to exactly one strongly connected
   component and that no element of SCCS is empty.  */
static void
check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
{
  int i = 0;
  sbitmap tmp = sbitmap_alloc (num_nodes);

  bitmap_clear (tmp);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      gcc_assert (!bitmap_empty_p (sccs->sccs[i]->nodes));
      /* Verify that every node in sccs is in exactly one strongly
         connected component.  */
      gcc_assert (!bitmap_intersect_p (tmp, sccs->sccs[i]->nodes));
      bitmap_ior (tmp, tmp, sccs->sccs[i]->nodes);
    }
  sbitmap_free (tmp);
}
#endif

/* Perform the Strongly Connected Components decomposing algorithm on the
   DDG and return DDG_ALL_SCCS structure that contains them.  */
ddg_all_sccs_ptr
create_ddg_all_sccs (ddg_ptr g)
{
  int i;
  int num_nodes = g->num_nodes;
  sbitmap from = sbitmap_alloc (num_nodes);
  sbitmap to = sbitmap_alloc (num_nodes);
  sbitmap scc_nodes = sbitmap_alloc (num_nodes);
  ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr)
			  xmalloc (sizeof (struct ddg_all_sccs));

  sccs->ddg = g;
  sccs->sccs = NULL;
  sccs->num_sccs = 0;

  for (i = 0; i < g->num_backarcs; i++)
    {
      ddg_scc_ptr  scc;
      ddg_edge_ptr backarc = g->backarcs[i];
      ddg_node_ptr src = backarc->src;
      ddg_node_ptr dest = backarc->dest;

      /* If the backarc already belongs to an SCC, continue.  */
      if (backarc->aux.count == IN_SCC)
	continue;

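      /* The SCC containing this backarc consists of all the nodes that lie
	 on some path from the backarc's destination back to its source.  */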
      bitmap_clear (scc_nodes);
      bitmap_clear (from);
      bitmap_clear (to);
      bitmap_set_bit (from, dest->cuid);
      bitmap_set_bit (to, src->cuid);

      if (find_nodes_on_paths (scc_nodes, g, from, to))
	{
	  scc = create_scc (g, scc_nodes);
	  add_scc_to_ddg (sccs, scc);
	}
    }
  order_sccs (sccs);
  sbitmap_free (from);
  sbitmap_free (to);
  sbitmap_free (scc_nodes);
#ifdef ENABLE_CHECKING
  check_sccs (sccs, num_nodes);
#endif
  return sccs;
}

/* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG.  */
void
free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
{
  int i;

  if (!all_sccs)
    return;

  for (i = 0; i < all_sccs->num_sccs; i++)
    free_scc (all_sccs->sccs[i]);

  free (all_sccs->sccs);
  free (all_sccs);
}


/* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination
   nodes - find all nodes that lie on paths from FROM to TO (not excluding
   nodes from FROM and TO).  Return nonzero if any such nodes exist.  */
int
find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
{
  int answer;
  int change;
  unsigned int u = 0;
  int num_nodes = g->num_nodes;
  sbitmap_iterator sbi;

  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap reachable_from = sbitmap_alloc (num_nodes);
  sbitmap reach_to = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  bitmap_copy (reachable_from, from);
  bitmap_copy (tmp, from);

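  /* Compute the set of nodes reachable from FROM: iterate over a workset,
     following outgoing edges, until no new nodes are added.  */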
  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
	{
	  ddg_edge_ptr e;
	  ddg_node_ptr u_node = &g->nodes[u];

	  for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out)
	    {
	      ddg_node_ptr v_node = e->dest;
	      int v = v_node->cuid;

	      if (!bitmap_bit_p (reachable_from, v))
		{
		  bitmap_set_bit (reachable_from, v);
		  bitmap_set_bit (tmp, v);
		  change = 1;
		}
	    }
	}
    }

  bitmap_copy (reach_to, to);
  bitmap_copy (tmp, to);

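  /* Symmetrically, compute the set of nodes that reach TO by following
     incoming edges backwards until no new nodes are added.  */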
  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
	{
	  ddg_edge_ptr e;
	  ddg_node_ptr u_node = &g->nodes[u];

	  for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in)
	    {
	      ddg_node_ptr v_node = e->src;
	      int v = v_node->cuid;

	      if (!bitmap_bit_p (reach_to, v))
		{
		  bitmap_set_bit (reach_to, v);
		  bitmap_set_bit (tmp, v);
		  change = 1;
		}
	    }
	}
    }

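  /* The nodes that lie on some path from FROM to TO are exactly those that
     are both reachable from FROM and able to reach TO.  */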
  answer = bitmap_and (result, reachable_from, reach_to);
  sbitmap_free (workset);
  sbitmap_free (reachable_from);
  sbitmap_free (reach_to);
  sbitmap_free (tmp);
  return answer;
}


/* Updates the counts of U_NODE's successors (that belong to NODES) to be
   at-least as large as the count of U_NODE plus the latency between them.
   Sets a bit in TMP for each successor whose count was changed (increased).
   Returns nonzero if any count was changed.  */
static int
update_dist_to_successors (ddg_node_ptr u_node, sbitmap nodes, sbitmap tmp)
{
  ddg_edge_ptr e;
  int result = 0;

  for (e = u_node->out; e; e = e->next_out)
    {
      ddg_node_ptr v_node = e->dest;
      int v = v_node->cuid;

      if (bitmap_bit_p (nodes, v)
	  && (e->distance == 0)
	  && (v_node->aux.count < u_node->aux.count + e->latency))
	{
	  v_node->aux.count = u_node->aux.count + e->latency;
	  bitmap_set_bit (tmp, v);
	  result = 1;
	}
    }
  return result;
}


/* Find the length of a longest path from SRC to DEST in G,
   going only through NODES, and disregarding backarcs.  */
int
longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
{
  int i;
  unsigned int u = 0;
  int change = 1;
  int result;
  int num_nodes = g->num_nodes;
  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);


  /* Data will hold the distance of the longest path found so far from
     src to each node.  Initialize to -1 = less than minimum.  */
  for (i = 0; i < g->num_nodes; i++)
    g->nodes[i].aux.count = -1;
  g->nodes[src].aux.count = 0;

  bitmap_clear (tmp);
  bitmap_set_bit (tmp, src);

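  /* Relax the distances iteratively: each pass processes the nodes whose
     count changed in the previous pass and updates their successors.  */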
  while (change)
    {
      sbitmap_iterator sbi;

      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
	{
	  ddg_node_ptr u_node = &g->nodes[u];

	  change |= update_dist_to_successors (u_node, nodes, tmp);
	}
    }
  result = g->nodes[dest].aux.count;
  sbitmap_free (workset);
  sbitmap_free (tmp);
  return result;
}

#endif /* INSN_SCHEDULING */