/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
   2008, 2009, 2010 Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
43 44 45
#include "tree.h"
#include "target.h"
#include "target-def.h"
46
#include "df.h"
47
#include "tree-pass.h"
48
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
49

50 51 52 53 54 55 56 57 58 59 60 61 62
DEF_VEC_P(df_ref);
DEF_VEC_ALLOC_P_STACK(df_ref);

#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)

typedef struct df_mw_hardreg *df_mw_hardreg_ptr;

DEF_VEC_P(df_mw_hardreg_ptr);
DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);

#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
  VEC_stack_alloc (df_mw_hardreg_ptr, alloc)

63 64 65 66 67 68 69 70 71 72 73 74 75 76
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

77 78
/* The following two macros free the vecs that hold either the refs or
   the mw refs.  They are a little tricky: a vec whose first element
   is NULL is the shared empty record (df_null_ref_rec /
   df_null_mw_rec) and must not be freed.  */
#define df_scan_free_ref_vec(V) \
  do { \
    if (V && *V) \
      free (V);  \
  } while (0)

#define df_scan_free_mws_vec(V) \
  do { \
    if (V && *V) \
      free (V);  \
  } while (0)

92 93 94 95 96 97 98
/* The set of hard registers in eliminables[i].from. */

static HARD_REG_SET elim_reg_set;

/* Initialize ur_in and ur_out as if all hard registers were partially
   available.  */

99 100
/* Holds the refs collected while scanning a single insn or basic
   block, before they are canonized and installed into the permanent
   df structures.  */
struct df_collection_rec
{
  VEC(df_ref,stack) *def_vec;            /* Defs found.  */
  VEC(df_ref,stack) *use_vec;            /* Uses found.  */
  VEC(df_ref,stack) *eq_use_vec;         /* Uses found in REG_EQUAL/REG_EQUIV notes.  */
  VEC(df_mw_hardreg_ptr,stack) *mw_vec;  /* Multiword hardreg records.  */
};

107
static df_ref df_null_ref_rec[1];
108 109
static struct df_mw_hardreg * df_null_mw_rec[1];

110
static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
H.J. Lu committed
111
			   rtx, rtx *,
112
			   basic_block, struct df_insn_info *,
113
			   enum df_ref_type, int ref_flags);
114 115
static void df_def_record_1 (struct df_collection_rec *, rtx,
			     basic_block, struct df_insn_info *,
116
			     int ref_flags);
117 118
static void df_defs_record (struct df_collection_rec *, rtx,
			    basic_block, struct df_insn_info *,
119
			    int ref_flags);
120
static void df_uses_record (struct df_collection_rec *,
121
			    rtx *, enum df_ref_type,
122
			    basic_block, struct df_insn_info *,
123
			    int ref_flags);
124

H.J. Lu committed
125 126
static df_ref df_ref_create_structure (enum df_ref_class,
				       struct df_collection_rec *, rtx, rtx *,
127
				       basic_block, struct df_insn_info *,
128
				       enum df_ref_type, int ref_flags);
H.J. Lu committed
129 130
static void df_insn_refs_collect (struct df_collection_rec*,
				  basic_block, struct df_insn_info *);
131 132 133 134 135 136 137 138 139
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
140
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
141 142
static void df_ref_chain_delete_du_chain (df_ref *);
static void df_ref_chain_delete (df_ref *);
143

H.J. Lu committed
144
static void df_refs_add_to_chains (struct df_collection_rec *,
145 146 147 148 149
				   basic_block, rtx);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
H.J. Lu committed
150
static void df_install_ref (df_ref, struct df_reg_info *,
151 152 153 154 155 156 157 158 159 160 161 162 163 164
			    struct df_ref_info *, bool);

static int df_ref_compare (const void *, const void *);
static int df_mw_compare (const void *, const void *);

/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
165 166 167 168 169 170 171 172 173 174 175 176 177

/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares the all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_base_pool;        /* Pool of DF_REF_BASE class refs.  */
  alloc_pool ref_artificial_pool;  /* Pool of DF_REF_ARTIFICIAL class refs.  */
  alloc_pool ref_regular_pool;     /* Pool of DF_REF_REGULAR class refs.  */
  alloc_pool insn_pool;            /* Pool of struct df_insn_info.  */
  alloc_pool reg_pool;             /* Pool of struct df_reg_info.  */
  alloc_pool mw_reg_pool;          /* Pool of struct df_mw_hardreg.  */
  bitmap_obstack reg_bitmaps;      /* Obstack backing the register bitmaps.  */
  bitmap_obstack insn_bitmaps;     /* Obstack backing the insn bitmaps.  */
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

190 191

/* Internal function to shut down the scanning problem.  Frees every
   vector, table and pool owned by the scan problem, leaving the df
   structure zeroed and ready for reallocation.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  basic_block bb;

  /* The vectors that hold the refs are not pool allocated because
     they come in many sizes.  This makes them impossible to delete
     all at once.  */
  for (i = 0; i < DF_INSN_SIZE(); i++)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
      /* Skip the insns that have no insn_info or have been
	 deleted.  */
      if (insn_info)
	{
	  df_scan_free_ref_vec (insn_info->defs);
	  df_scan_free_ref_vec (insn_info->uses);
	  df_scan_free_ref_vec (insn_info->eq_uses);
	  df_scan_free_mws_vec (insn_info->mw_hardregs);
	}
    }

  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (bb_info)
	{
	  df_scan_free_ref_vec (bb_info->artificial_defs);
	  df_scan_free_ref_vec (bb_info->artificial_uses);
	}
    }

  /* Release the flat def/use tables and zero the bookkeeping that
     describes them.  */
  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  /* The arrays of pointers to per-register info.  The df_reg_info
     records they point at are pool allocated (see df_grow_reg_info)
     and die with reg_pool below.  */
  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE(df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  /* Bitmaps allocated on the problem's obstacks / reg_bitmaps.  */
  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  /* Freeing the pools releases all remaining refs, insn info, reg
     info and mw records in one shot.  */
  free_alloc_pool (problem_data->ref_base_pool);
  free_alloc_pool (problem_data->ref_artificial_pool);
  free_alloc_pool (problem_data->ref_regular_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}


/* Free basic block info.  BB is the block; VBB_INFO is its
   struct df_scan_bb_info, passed as void * by the problem's
   free_bb_fun hook.  Deletes the insn info of every insn in BB and
   the block's artificial defs/uses.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;

  /* See if bb_info is initialized.  */
  if (bb_info->artificial_defs)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    /* Record defs within INSN.  */
	    df_insn_delete (bb, INSN_UID (insn));
	}

      /* NOTE(review): bb_info is refetched here, presumably because
	 df_insn_delete above can reallocate the block info array —
	 confirm before relying on the stale pointer.  */
      if (bb_index < df_scan->block_info_size)
	bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      if (bb_info->artificial_defs)
	{
	  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
	  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
	  df_ref_chain_delete (bb_info->artificial_defs);
	  df_ref_chain_delete (bb_info->artificial_uses);
	  bb_info->artificial_defs = NULL;
	  bb_info->artificial_uses = NULL;
	}
    }
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */
H.J. Lu committed
315
void
316
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
317 318 319
{
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
320 321
  unsigned int block_size = 400;
  basic_block bb;
322 323 324

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
325 326
  if (df_scan->problem_data)
    df_scan_free_internal ();
327

328
  problem_data = XNEW (struct df_scan_problem_data);
329 330
  df_scan->problem_data = problem_data;
  df_scan->computed = true;
331

H.J. Lu committed
332 333
  problem_data->ref_base_pool
    = create_alloc_pool ("df_scan ref base",
334
			 sizeof (struct df_base_ref), block_size);
H.J. Lu committed
335 336
  problem_data->ref_artificial_pool
    = create_alloc_pool ("df_scan ref artificial",
337
			 sizeof (struct df_artificial_ref), block_size);
H.J. Lu committed
338 339
  problem_data->ref_regular_pool
    = create_alloc_pool ("df_scan ref regular",
340
			 sizeof (struct df_regular_ref), block_size);
H.J. Lu committed
341 342
  problem_data->insn_pool
    = create_alloc_pool ("df_scan insn",
343
			 sizeof (struct df_insn_info), block_size);
H.J. Lu committed
344 345
  problem_data->reg_pool
    = create_alloc_pool ("df_scan reg",
346
			 sizeof (struct df_reg_info), block_size);
H.J. Lu committed
347 348
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan mw_reg",
349
			 sizeof (struct df_mw_hardreg), block_size);
350

351 352
  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);
353

H.J. Lu committed
354
  insn_num += insn_num / 4;
355
  df_grow_reg_info ();
356

357 358
  df_grow_insn_info ();
  df_grow_bb_info (df_scan);
359

360
  FOR_ALL_BB (bb)
361
    {
362 363
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
364 365 366 367
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

368 369 370
  bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
371 372
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
373 374 375
  bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
376
  df_scan->optional_p = false;
377 378 379 380 381
}


/* Free all of the data associated with the scan problem.  */

H.J. Lu committed
382
static void
383
df_scan_free (void)
384
{
385 386
  if (df_scan->problem_data)
    df_scan_free_internal ();
387

388
  if (df->blocks_to_analyze)
389 390 391 392
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }
393

394
  free (df_scan);
395 396
}

397
/* Dump the preamble for DF_SCAN dump. */
H.J. Lu committed
398
static void
399
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
400 401
{
  int i;
402 403 404 405 406 407 408
  int dcount = 0;
  int ucount = 0;
  int ecount = 0;
  int icount = 0;
  int ccount = 0;
  basic_block bb;
  rtx insn;
409

410
  fprintf (file, ";;  invalidated by call \t");
411
  df_print_regset (file, regs_invalidated_by_call_regset);
412
  fprintf (file, ";;  hardware regs used \t");
413
  df_print_regset (file, &df->hardware_regs_used);
414
  fprintf (file, ";;  regular block artificial uses \t");
415
  df_print_regset (file, &df->regular_block_artificial_uses);
416
  fprintf (file, ";;  eh block artificial uses \t");
417
  df_print_regset (file, &df->eh_block_artificial_uses);
418 419 420 421 422
  fprintf (file, ";;  entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";;  exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";;  regs ever live \t");
423
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
424 425
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);
426
  fprintf (file, "\n;;  ref usage \t");
H.J. Lu committed
427

428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452
  for (i = 0; i < (int)df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
	const char * sep = "";

	fprintf (file, "r%d={", i);
	if (DF_REG_DEF_COUNT (i))
	  {
	    fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
	    sep = ",";
	    dcount += DF_REG_DEF_COUNT (i);
	  }
	if (DF_REG_USE_COUNT (i))
	  {
	    fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
	    sep = ",";
	    ucount += DF_REG_USE_COUNT (i);
	  }
	if (DF_REG_EQ_USE_COUNT (i))
	  {
	    fprintf (file, "%s%dd", sep, DF_REG_EQ_USE_COUNT (i));
	    ecount += DF_REG_EQ_USE_COUNT (i);
	  }
	fprintf (file, "} ");
      }
453

454 455 456 457 458 459 460 461 462 463
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    ccount++;
	  else
	    icount++;
	}

H.J. Lu committed
464
  fprintf (file, "\n;;    total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
465
	   dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
466 467
}

468
/* Dump the bb_info for a given basic block BB to FILE: the block's
   artificial defs and uses.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  /* Disabled: per-insn debug dumps make the output very large.  */
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	df_insn_debug (insn, false, file);
  }
#endif
}

493 494 495 496 497
/* The scanning problem descriptor.  Scanning has no iterative solver,
   so all of the solver-related hooks are NULL.  */
static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  sizeof (struct df_scan_bb_info),/* Size of entry of block_info array.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}

531

532 533 534 535 536 537
/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      /* Overallocate by 25% so repeated growth is amortized.  */
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
				    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  /* Fill the regs_inited..max_reg slots with freshly zeroed
     df_reg_info records from the problem's reg pool.  */
  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}


/* Grow the ref information.  Resize REF_INFO's flat refs array to
   NEW_SIZE entries, zeroing only the newly added tail.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
      memset (ref_info->refs + ref_info->refs_size, 0,
	      (new_size - ref_info->refs_size) *sizeof (df_ref));
      ref_info->refs_size = new_size;
    }
}


605 606 607 608 609 610
/* Check and grow the ref information if necessary.  This routine
   guarantees total_size + BITMAP_ADDEND amount of entries in refs
   array.  It updates ref_info->refs_size only and does not change
   ref_info->total_size.  */

static void
H.J. Lu committed
611
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
612 613 614 615 616 617 618 619 620 621 622
			    unsigned bitmap_addend)
{
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
    {
      int new_size = ref_info->total_size + bitmap_addend;
      new_size += ref_info->total_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }
}


623 624 625 626
/* Grow the ref information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
      /* Zero only the newly added tail.  NOTE(review): this mixes
	 df->insns_size with DF_INSN_SIZE (); presumably the macro
	 expands to df->insns_size — confirm in df.h.  */
      memset (df->insns + df->insns_size, 0,
	      (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}




/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

648 649
/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  /* EH blocks get the regular artificial uses as well.  */
  bitmap_ior_into (&df->eh_block_artificial_uses,
		   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}

681

682
/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  This function is only used externally.
   The new ref is installed in the per-register chain, optionally the
   flat ref table, and the insn's sorted def/use/eq-use vector; BB is
   marked dirty so incremental verification notices the change.  */

df_ref
df_ref_create (rtx reg, rtx *loc, rtx insn,
	       basic_block bb,
	       enum df_ref_type ref_type,
	       int ref_flags)
{
  df_ref ref;
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_rec;
  df_ref **ref_rec_ptr;
  unsigned int count = 0;
  bool add_to_table;
  enum df_ref_class cl;

  df_grow_reg_info ();

  /* You cannot hack artificial refs.  */
  gcc_assert (insn);

  /* A ref with a known location gets the larger "regular" class.  */
  if (loc)
    cl = DF_REF_REGULAR;
  else
    cl = DF_REF_BASE;
  ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
                                 ref_type, ref_flags);

  /* Pick the reg-info array, flat table and insn vector that match
     the kind of ref: def, note (eq) use, or plain use.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_rec_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_EQ_USES (insn);
      /* Eq-uses only live in the flat table when it was built in one
	 of the WITH_NOTES orders.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  add_to_table = true;
	  break;
	default:
	  add_to_table = false;
	  break;
	}
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  /* Appending out of order demotes the table to an unordered state.  */
  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	break;
      default:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	break;
      }

  /* Count the existing NULL-terminated vector.  */
  ref_rec = *ref_rec_ptr;
  while (*ref_rec)
    {
      count++;
      ref_rec++;
    }

  ref_rec = *ref_rec_ptr;
  if (count)
    {
      /* Grow by one slot (plus terminator) and keep the vector
	 sorted.  */
      ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
      *ref_rec_ptr = ref_rec;
      ref_rec[count] = ref;
      ref_rec[count+1] = NULL;
      qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
    }
  else
    {
      df_ref *ref_rec = XNEWVEC (df_ref, 2);
      ref_rec[0] = ref;
      ref_rec[1] = NULL;
      *ref_rec_ptr = ref_rec;
    }

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);

  return ref;
}


804 805 806 807 808

/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/

809
/* Return REF to the allocation pool that matches its class.  */
static void
df_free_ref (df_ref ref)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  switch (DF_REF_CLASS (ref))
    {
    case DF_REF_BASE:
      pool_free (problem_data->ref_base_pool, ref);
      break;

    case DF_REF_ARTIFICIAL:
      pool_free (problem_data->ref_artificial_pool, ref);
      break;

    case DF_REF_REGULAR:
      pool_free (problem_data->ref_regular_pool, ref);
      break;
    }
}

831

832 833 834 835
/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  Clears
   REF's slot in the flat table, updates the per-register counters,
   splices REF out of the doubly-linked reg chain, and frees it.  */

static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	{
	  reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
	  /* Eq-uses are only present in the flat table when it was
	     built in one of the WITH_NOTES orders.  */
	  switch (df->use_info.ref_order)
	    {
	    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	    case DF_REF_ORDER_BY_REG_WITH_NOTES:
	    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	      refs = df->use_info.refs;
	      break;
	    default:
	      break;
	    }
	}
      else
	{
	  reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
	  refs = df->use_info.refs;
	}
    }

  if (refs)
    {
      if (df->analyze_subset)
	{
	  if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
	    refs[id] = NULL;
	}
      else
	refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here. It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}


/* Remove REF from VEC, the NULL-terminated ref vector at *VEC_PTR.  */

static void
df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
{
  df_ref *vec = *vec_ptr;

  if (vec[1])
    {
      /* More than one entry: find REF and shift everything after it
	 down one slot, carrying the NULL terminator along.  */
      while (*vec && *vec != ref)
	vec++;

      while (*vec)
	{
	  *vec = *(vec+1);
	  vec++;
	}
    }
  else
    {
      /* REF was the only entry: free the vector and point at the
	 shared empty record, which is never freed.  */
      free (vec);
      *vec_ptr = df_null_ref_rec;
    }
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (df_ref ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  /* First pull REF out of the ref vector that holds it: artificial
     refs live in the basic block's scan info; ordinary refs live in
     the owning insn's defs/uses/eq_uses vector.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
	{
	  struct df_scan_bb_info *bb_info
	    = df_scan_get_bb_info (DF_REF_BBNO (ref));
	  df_ref_compress_rec (&bb_info->artificial_defs, ref);
	}
      else
	{
	  unsigned int uid = DF_REF_INSN_UID (ref);
	  struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
	  df_ref_compress_rec (&insn_rec->defs, ref);
	}
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
	{
	  struct df_scan_bb_info *bb_info
	    = df_scan_get_bb_info (DF_REF_BBNO (ref));
	  df_ref_compress_rec (&bb_info->artificial_uses, ref);
	}
      else
	{
	  unsigned int uid = DF_REF_INSN_UID (ref);
	  struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

	  /* Uses flagged DF_REF_IN_NOTE are kept in the separate
	     eq_uses vector, not with the ordinary uses.  */
	  if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	    df_ref_compress_rec (&insn_rec->eq_uses, ref);
	  else
	    df_ref_compress_rec (&insn_rec->uses, ref);
	}
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}


998 999
/* Create the insn record for INSN.  If there was one there, zero it
   out.  */
1000

1001 1002
struct df_insn_info *
df_insn_create_insn_record (rtx insn)
1003
{
1004
  struct df_scan_problem_data *problem_data
1005 1006
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;
1007

1008
  df_grow_insn_info ();
1009
  insn_rec = DF_INSN_INFO_GET (insn);
1010 1011
  if (!insn_rec)
    {
1012
      insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
1013
      DF_INSN_INFO_SET (insn, insn_rec);
1014 1015
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));
1016
  insn_rec->insn = insn;
1017 1018 1019
  return insn_rec;
}

1020

1021
/* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain.  */
1022

1023
static void
1024
df_ref_chain_delete_du_chain (df_ref *ref_rec)
1025
{
1026
  while (*ref_rec)
1027
    {
1028
      df_ref ref = *ref_rec;
H.J. Lu committed
1029
      /* CHAIN is allocated by DF_CHAIN. So make sure to
1030 1031 1032 1033 1034 1035
         pass df_scan instance for the problem.  */
      if (DF_REF_CHAIN (ref))
        df_chain_unlink (ref);
      ref_rec++;
    }
}
1036

1037

1038 1039 1040
/* Delete all refs in the ref chain.  */

static void
1041
df_ref_chain_delete (df_ref *ref_rec)
1042
{
1043
  df_ref *start = ref_rec;
1044 1045 1046 1047 1048 1049 1050 1051 1052 1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068 1069 1070 1071 1072 1073 1074 1075 1076 1077 1078 1079 1080
  while (*ref_rec)
    {
      df_reg_chain_unlink (*ref_rec);
      ref_rec++;
    }

  /* If the list is empty, it has a special shared element that is not
     to be deleted.  */
  if (*start)
    free (start);
}


/* Delete the hardreg chain.  */

static void
df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
{
  struct df_scan_problem_data *problem_data;

  if (!hardregs)
    return;

  problem_data = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*hardregs)
    {
      pool_free (problem_data->mw_reg_pool, *hardregs);
      hardregs++;
    }
}


/* Delete all of the refs information from INSN.  BB must be passed in
   except when called from df_process_deferred_rescans to mark the block
   as dirty.  */

H.J. Lu committed
1081
void
1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

1099
  /* The client has deferred rescanning.  */
1100 1101 1102 1103
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (insn_info)
	{
1104 1105 1106
	  bitmap_clear_bit (&df->insns_to_rescan, uid);
	  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
	  bitmap_set_bit (&df->insns_to_delete, uid);
1107 1108
	}
      if (dump_file)
1109
	fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1110 1111 1112 1113 1114 1115
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

1116 1117 1118
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1119 1120
  if (insn_info)
    {
H.J. Lu committed
1121
      struct df_scan_problem_data *problem_data
1122 1123 1124 1125 1126 1127 1128 1129 1130 1131
	= (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
	 initialized.  However, combine deletes insns by changing them
	 to notes.  How clever.  So we cannot just check if it is a
	 valid insn before short circuiting this code, we need to see
	 if we actually initialized it.  */
      if (insn_info->defs)
	{
	  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
H.J. Lu committed
1132

1133 1134 1135
	  if (df_chain)
	    {
	      df_ref_chain_delete_du_chain (insn_info->defs);
H.J. Lu committed
1136
	      df_ref_chain_delete_du_chain (insn_info->uses);
1137 1138
	      df_ref_chain_delete_du_chain (insn_info->eq_uses);
	    }
H.J. Lu committed
1139

1140 1141 1142 1143
	  df_ref_chain_delete (insn_info->defs);
	  df_ref_chain_delete (insn_info->uses);
	  df_ref_chain_delete (insn_info->eq_uses);
	}
1144
      pool_free (problem_data->insn_pool, insn_info);
1145
      DF_INSN_UID_SET (uid, NULL);
1146 1147 1148 1149
    }
}


1150
/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */
1151

1152 1153
static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
1154
{
1155
  unsigned int ix;
H.J. Lu committed
1156
  struct df_scan_problem_data *problem_data
1157
    = (struct df_scan_problem_data *) df_scan->problem_data;
1158 1159 1160
  df_ref ref;
  struct df_mw_hardreg *mw;

1161
  FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
1162
    df_free_ref (ref);
1163
  FOR_EACH_VEC_ELT (df_ref, collection_rec->use_vec, ix, ref)
1164
    df_free_ref (ref);
1165
  FOR_EACH_VEC_ELT (df_ref, collection_rec->eq_use_vec, ix, ref)
1166
    df_free_ref (ref);
1167
  FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw)
1168 1169 1170 1171 1172 1173
    pool_free (problem_data->mw_reg_pool, mw);

  VEC_free (df_ref, stack, collection_rec->def_vec);
  VEC_free (df_ref, stack, collection_rec->use_vec);
  VEC_free (df_ref, stack, collection_rec->eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
1174
}
1175

1176 1177
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.
   Returns FALSE (without scanning) when df is absent, INSN is not a
   real insn, INSN has no block, rescanning is disabled, deferred, or
   when a verify pass finds the recorded refs already up to date.  */

bool
df_insn_rescan (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
	fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  /* Create a placeholder record with the shared empty
	     vectors so the deferred pass finds something valid.  */
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = df_null_ref_rec;
	  insn_info->uses = df_null_ref_rec;
	  insn_info->eq_uses = df_null_ref_rec;
	  insn_info->mw_hardregs = df_null_mw_rec;
	}
      if (dump_file)
	fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      /* Queue INSN for rescan only; it is no longer pending deletion
	 or notes-rescan.  */
      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  /* Stack-allocated scratch vectors that will receive the freshly
     collected refs.  */
  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false. */
      if (the_same)
	{
	  df_free_collection_rec (&collection_rec);
	  if (dump_file)
	    fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
	 Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_delete (NULL, uid);
      df_insn_create_insn_record (insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      /* NOTE: this inner insn_info intentionally shadows the (NULL)
	 outer one; it is only needed for the collect call.  */
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
	fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  /* Install the collected refs into the insn/bb structures.  */
  df_refs_add_to_chains (&collection_rec, bb, insn);
  if (DEBUG_INSN_P (insn))
    df_set_bb_dirty_nonlr (bb);
  else
    df_set_bb_dirty (bb);

  VEC_free (df_ref, stack, collection_rec.def_vec);
  VEC_free (df_ref, stack, collection_rec.use_vec);
  VEC_free (df_ref, stack, collection_rec.eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);

  return true;
}

1277 1278 1279 1280 1281 1282 1283 1284 1285
/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  Only valid for a debug insn whose location is unknown;
   simply drops the insn's recorded refs rather than recollecting
   them.  Returns TRUE if any refs were actually deleted.  */

bool
df_insn_rescan_debug_internal (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  gcc_assert (DEBUG_INSN_P (insn)
	      && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  /* This insn is no longer pending any deferred work.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  /* Record was never initialized (see df_insn_delete's note about
     combine turning insns into notes).  */
  if (!insn_info->defs)
    return false;

  /* All four vectors are already the shared empty records: nothing
     to delete.  */
  if (insn_info->defs == df_null_ref_rec
      && insn_info->uses == df_null_ref_rec
      && insn_info->eq_uses == df_null_ref_rec
      && insn_info->mw_hardregs == df_null_mw_rec)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  /* Drop du-chains before freeing the refs they point at.  */
  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  /* Leave the record in the canonical "empty" state.  */
  insn_info->defs = df_null_ref_rec;
  insn_info->uses = df_null_ref_rec;
  insn_info->eq_uses = df_null_ref_rec;
  insn_info->mw_hardregs = df_null_mw_rec;

  return true;
}

1333 1334 1335 1336 1337 1338 1339 1340 1341 1342 1343 1344 1345

/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  /* Temporarily lift the no-rescan / defer-rescan flags so the calls
     below actually do the work; they are restored at the end.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  /* Flush the deferred deletions first.  Iterate over a copy because
     df_insn_delete modifies insns_to_delete.  */
  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_delete (NULL, uid);
    }

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	{
	  df_insn_rescan (insn);
	}
    }

  /* Restore the flags we lifted above.  */
  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}


1391
/* Process all of the deferred rescans or deletions: first the queued
   deletions, then full rescans, then notes-only rescans.  The
   no-rescan/defer-rescan flags are lifted for the duration and
   restored afterwards.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  /* Iterate over copies of each queue, since the df_insn_* calls
     mutate the underlying bitmaps.  */
  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_delete (NULL, uid);
    }

  bitmap_copy (&tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}

1465

1466 1467 1468 1469 1470
/* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
   the uses if INCLUDE_USES. Include the eq_uses if
   INCLUDE_EQ_USES.  */

static unsigned int
H.J. Lu committed
1471
df_count_refs (bool include_defs, bool include_uses,
1472 1473 1474 1475 1476
	       bool include_eq_uses)
{
  unsigned int regno;
  int size = 0;
  unsigned int m = df->regs_inited;
H.J. Lu committed
1477

1478
  for (regno = 0; regno < m; regno++)
1479
    {
1480 1481 1482 1483 1484 1485
      if (include_defs)
	size += DF_REG_DEF_COUNT (regno);
      if (include_uses)
	size += DF_REG_USE_COUNT (regno);
      if (include_eq_uses)
	size += DF_REG_EQ_USE_COUNT (regno);
1486
    }
1487
  return size;
1488 1489 1490 1491
}


/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.
   Fills REF_INFO->refs, assigns each ref its table id, and records
   per-register begin/count so refs of one regno are contiguous.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
				  bool include_defs,
				  bool include_uses,
				  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      /* Hard regs are skipped entirely; zero their table entries so
	 stale data is never read.  */
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  /* For each register, walk the requested reg chains and append the
     refs to the table, remembering where each register's run begins
     and how long it is.  */
  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
	{
	  df_ref ref = DF_REG_DEF_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_uses)
	{
	  df_ref ref = DF_REG_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_eq_uses)
	{
	  df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}


1570 1571 1572 1573
/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order
   which is likely to be best if processing some segment of the
   function.  Works like a counting sort: a first pass over the
   blocks in df->blocks_to_analyze counts refs per register, the
   begin[] offsets are derived from those counts, and a second pass
   places each ref at its slot.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
				   bool include_defs,
				   bool include_uses,
				   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  /* With DF_NO_HARD_REGS, hard registers are excluded from the
     table.  */
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  /* Pass 1: count how many requested refs each register has in the
     analyzed blocks (artificial refs of the block plus refs of every
     insn).  */
  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
	for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
	  {
	    unsigned int regno = DF_REF_REGNO (*ref_rec);
	    ref_info->count[regno]++;
	  }
      if (include_uses)
	for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
	  {
	    unsigned int regno = DF_REF_REGNO (*ref_rec);
	    ref_info->count[regno]++;
	  }

      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    {
	      unsigned int uid = INSN_UID (insn);

	      if (include_defs)
		for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
		  {
		    unsigned int regno = DF_REF_REGNO (*ref_rec);
		    ref_info->count[regno]++;
		  }
	      if (include_uses)
		for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
		  {
		    unsigned int regno = DF_REF_REGNO (*ref_rec);
		    ref_info->count[regno]++;
		  }
	      if (include_eq_uses)
		for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
		  {
		    unsigned int regno = DF_REF_REGNO (*ref_rec);
		    ref_info->count[regno]++;
		  }
	    }
	}
    }

  /* Turn the counts into start offsets; count[] is reset so pass 2
     can reuse it as a per-register fill cursor.  */
  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  /* Pass 2: revisit the same refs in the same order and drop each one
     into its register's next free slot, assigning its table id.  */
  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
	for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
	  {
	    df_ref ref = *ref_rec;
	    unsigned int regno = DF_REF_REGNO (ref);
	    if (regno >= start)
	      {
		unsigned int id
		  = ref_info->begin[regno] + ref_info->count[regno]++;
		DF_REF_ID (ref) = id;
		ref_info->refs[id] = ref;
	      }
	  }
      if (include_uses)
	for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
	  {
	    df_ref ref = *ref_rec;
	    unsigned int regno = DF_REF_REGNO (ref);
	    if (regno >= start)
	      {
		unsigned int id
		  = ref_info->begin[regno] + ref_info->count[regno]++;
		DF_REF_ID (ref) = id;
		ref_info->refs[id] = ref;
	      }
	  }

      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    {
	      unsigned int uid = INSN_UID (insn);

	      if (include_defs)
		for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
		  {
		    df_ref ref = *ref_rec;
		    unsigned int regno = DF_REF_REGNO (ref);
		    if (regno >= start)
		      {
			unsigned int id
			  = ref_info->begin[regno] + ref_info->count[regno]++;
			DF_REF_ID (ref) = id;
			ref_info->refs[id] = ref;
		      }
		  }
	      if (include_uses)
		for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
		  {
		    df_ref ref = *ref_rec;
		    unsigned int regno = DF_REF_REGNO (ref);
		    if (regno >= start)
		      {
			unsigned int id
			  = ref_info->begin[regno] + ref_info->count[regno]++;
			DF_REF_ID (ref) = id;
			ref_info->refs[id] = ref;
		      }
		  }
	      if (include_eq_uses)
		for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
		  {
		    df_ref ref = *ref_rec;
		    unsigned int regno = DF_REF_REGNO (ref);
		    if (regno >= start)
		      {
			unsigned int id
			  = ref_info->begin[regno] + ref_info->count[regno]++;
			DF_REF_ID (ref) = id;
			ref_info->refs[id] = ref;
		      }
		  }
	    }
	}
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */

  ref_info->table_size = offset;
}

/* Take build ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

H.J. Lu committed
1741
static void
1742
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
H.J. Lu committed
1743 1744
			   bool include_defs,
			   bool include_uses,
1745 1746 1747
			   bool include_eq_uses)
{
  if (df->analyze_subset)
H.J. Lu committed
1748
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1749 1750
				       include_uses, include_eq_uses);
  else
H.J. Lu committed
1751
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1752 1753 1754 1755 1756
				       include_uses, include_eq_uses);
}


/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
H.J. Lu committed
1757 1758 1759
static unsigned int
df_add_refs_to_table (unsigned int offset,
		      struct df_ref_info *ref_info,
1760
		      df_ref *ref_vec)
1761 1762 1763
{
  while (*ref_vec)
    {
1764
      df_ref ref = *ref_vec;
1765 1766 1767 1768 1769 1770 1771 1772 1773 1774 1775 1776 1777 1778 1779 1780 1781
      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
	  || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
	{
	  ref_info->refs[offset] = ref;
	  DF_REF_ID (*ref_vec) = offset++;
	}
      ref_vec++;
    }
  return offset;
}


/* Count the number of refs in all of the insns of BB. Include the
   defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
   eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
H.J. Lu committed
1782
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1783
			       struct df_ref_info *ref_info,
H.J. Lu committed
1784
			       bool include_defs, bool include_uses,
1785 1786 1787 1788 1789
			       bool include_eq_uses)
{
  rtx insn;

  if (include_defs)
H.J. Lu committed
1790
    offset = df_add_refs_to_table (offset, ref_info,
1791 1792
				   df_get_artificial_defs (bb->index));
  if (include_uses)
H.J. Lu committed
1793
    offset = df_add_refs_to_table (offset, ref_info,
1794 1795 1796 1797 1798 1799 1800
				   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	unsigned int uid = INSN_UID (insn);
	if (include_defs)
H.J. Lu committed
1801
	  offset = df_add_refs_to_table (offset, ref_info,
1802 1803
					 DF_INSN_UID_DEFS (uid));
	if (include_uses)
H.J. Lu committed
1804
	  offset = df_add_refs_to_table (offset, ref_info,
1805 1806
					 DF_INSN_UID_USES (uid));
	if (include_eq_uses)
H.J. Lu committed
1807
	  offset = df_add_refs_to_table (offset, ref_info,
1808
					 DF_INSN_UID_EQ_USES (uid));
1809
      }
1810 1811 1812 1813
  return offset;
}


1814
/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS. Include the uses if
   INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
			    bool include_defs, bool include_uses,
			    bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      /* Only the analyzed subset of blocks contributes refs.  */
      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
	{
	  offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
						  include_defs, include_uses,
						  include_eq_uses);
	}

      ref_info->table_size = offset;
    }
  else
    {
      /* Whole function, including the entry/exit blocks.  */
      FOR_ALL_BB (bb)
	offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
						include_defs, include_uses,
						include_eq_uses);
      ref_info->table_size = offset;
    }
}


/* If the use refs in DF are not organized in ORDER, reorganize them.
   The WITH_NOTES variants additionally include the eq_uses;
   NO_TABLE discards the table entirely.  Requesting an UNORDERED
   order is a caller error.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  /* Already in the requested order: nothing to do.  */
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      /* Drop the table; the refs themselves stay on the reg chains.  */
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}


/* If the def refs in DF are not organized in ORDER, reorganize them.
   Unlike the use table, the def table has no WITH_NOTES variants
   (defs never appear in notes), so requesting one is a caller
   error, as is UNORDERED.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  /* Already in the requested order: nothing to do.  */
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      /* Drop the table; the refs themselves stay on the reg chains.  */
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}


/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  NEW_BB is recorded on the
   insn and both the old and new blocks are marked dirty.  */

void
df_insn_change_bb (rtx insn, basic_block new_bb)
{
  basic_block old_bb = BLOCK_FOR_INSN (insn);
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (old_bb == new_bb)
    return;

  /* Record the new block on the insn itself; this must happen even
     when dataflow is not (yet) initialized.  */
  set_block_for_insn (insn, new_bb);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      /* Never scanned before: a full rescan picks up the new block.  */
      if (dump_file)
	fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
	fprintf (dump_file, "  from %d to %d\n",
		 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}


/* Helper function for df_ref_change_reg_with_loc.  Move every ref on
   OLD_DF's reg_chain whose location is LOC over to NEW_DF's chain,
   rewriting its regno to NEW_REGNO and re-sorting the containing
   insn's ref vector (regno is a sort key).  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
			      struct df_reg_info *new_df,
			      int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      /* Artificial refs have no loc, so they can never match and are
	 left untouched.  */
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
	  && DF_REF_LOC (the_ref)
	  && (*DF_REF_LOC (the_ref) == loc))
	{
	  df_ref next_ref = DF_REF_NEXT_REG (the_ref);
	  df_ref prev_ref = DF_REF_PREV_REG (the_ref);
	  df_ref *ref_vec, *ref_vec_t;
	  struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
	  unsigned int count = 0;

	  DF_REF_REGNO (the_ref) = new_regno;
	  DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

	  /* Pull the_ref out of the old regno chain.  */
	  if (prev_ref)
	    DF_REF_NEXT_REG (prev_ref) = next_ref;
	  else
	    old_df->reg_chain = next_ref;
	  if (next_ref)
	    DF_REF_PREV_REG (next_ref) = prev_ref;
	  old_df->n_refs--;

	  /* Put the ref into the new regno chain.  */
	  DF_REF_PREV_REG (the_ref) = NULL;
	  DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
	  if (new_df->reg_chain)
	    DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
	  new_df->reg_chain = the_ref;
	  new_df->n_refs++;
	  if (DF_REF_BB (the_ref))
	    df_set_bb_dirty (DF_REF_BB (the_ref));

	  /* Need to sort the record again that the ref was in because
	     the regno is a sorting key.  First, find the right
	     record.  */
	  if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
	    ref_vec = insn_info->eq_uses;
	  else
	    ref_vec = insn_info->uses;
	  if (dump_file)
	    fprintf (dump_file, "changing reg in insn %d\n",
		     DF_REF_INSN_UID (the_ref));

	  ref_vec_t = ref_vec;

	  /* Find the length.  */
	  while (*ref_vec_t)
	    {
	      count++;
	      ref_vec_t++;
	    }
	  qsort (ref_vec, count, sizeof (df_ref ), df_ref_compare);

	  /* NEXT_REF was saved before the chain was spliced, so the
	     walk continues on the old chain.  */
	  the_ref = next_ref;
	}
      else
	the_ref = DF_REF_NEXT_REG (the_ref);
    }
}


/* Change the regno of all refs that contained LOC from OLD_REGNO to
   NEW_REGNO.  Refs that do not match LOC are not changed which means
   that artificial refs are not changed since they have no loc.  This
   call is to support the SET_REGNO macro. */

void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
{
  if ((!df) || (old_regno == -1) || (old_regno == new_regno))
    return;

  /* NEW_REGNO may not have reg_info entries yet.  */
  df_grow_reg_info ();

  /* Defs, uses, and eq_uses live on separate per-regno chains; each
     must be migrated independently.  */
  df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
				DF_REG_DEF_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
				DF_REG_USE_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
				DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
}


/* Delete the mw_hardregs that point into the eq_notes.  */

static unsigned int
df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
{
  struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
  unsigned int deleted = 0;
  unsigned int count = 0;
H.J. Lu committed
2082
  struct df_scan_problem_data *problem_data
2083 2084 2085 2086 2087 2088 2089 2090 2091 2092 2093 2094 2095 2096 2097 2098
    = (struct df_scan_problem_data *) df_scan->problem_data;

  if (!*mw_vec)
    return 0;

  while (*mw_vec)
    {
      if ((*mw_vec)->flags & DF_REF_IN_NOTE)
	{
	  struct df_mw_hardreg **temp_vec = mw_vec;

	  pool_free (problem_data->mw_reg_pool, *mw_vec);
	  temp_vec = mw_vec;
	  /* Shove the remaining ones down one to fill the gap.  While
	     this looks n**2, it is highly unusual to have any mw regs
	     in eq_notes and the chances of more than one are almost
H.J. Lu committed
2099
	     non existent.  */
2100 2101 2102 2103 2104 2105 2106 2107 2108 2109 2110 2111 2112 2113 2114 2115
	  while (*temp_vec)
	    {
	      *temp_vec = *(temp_vec + 1);
	      temp_vec++;
	    }
	  deleted++;
	}
      else
	{
	  mw_vec++;
	  count++;
	}
    }

  if (count == 0)
    {
2116
      df_scan_free_mws_vec (insn_info->mw_hardregs);
2117 2118 2119 2120 2121 2122 2123 2124 2125 2126 2127 2128 2129 2130 2131 2132 2133 2134 2135 2136 2137 2138
      insn_info->mw_hardregs = df_null_mw_rec;
      return 0;
    }
  return deleted;
}


/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  */

void
df_notes_rescan (rtx insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  /* Create a placeholder record with the shared empty ref
	     vectors so the deferred rescan has something to fill.  */
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = df_null_ref_rec;
	  insn_info->uses = df_null_ref_rec;
	  insn_info->eq_uses = df_null_ref_rec;
	  insn_info->mw_hardregs = df_null_mw_rec;
	}

      bitmap_clear_bit (&df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not need to also
	 be notes rescanned.  */
      if (!bitmap_bit_p (&df->insns_to_rescan, uid))
	bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
      return;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      struct df_collection_rec collection_rec;
      unsigned int num_deleted;
      unsigned int mw_len;

      memset (&collection_rec, 0, sizeof (struct df_collection_rec));
      collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
      collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

      /* Throw away the old note refs; the notes are re-parsed below.  */
      num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes */
      for (note = REG_NOTES (insn); note;
	   note = XEXP (note, 1))
	{
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_EQUIV:
	    case REG_EQUAL:
	      df_uses_record (&collection_rec,
			      &XEXP (note, 0), DF_REF_REG_USE,
			      bb, insn_info, DF_REF_IN_NOTE);
	      /* Fall through.  */
	    default:
	      break;
	    }
	}

      /* Find some place to put any new mw_hardregs.  */
      df_canonize_collection_rec (&collection_rec);
      mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
      if (mw_len)
	{
	  unsigned int count = 0;
	  struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
	  while (*mw_rec)
	    {
	      count++;
	      mw_rec++;
	    }

	  if (count)
	    {
	      /* Append to the end of the existing record after
		 expanding it if necessary.  */
	      if (mw_len > num_deleted)
		{
		  insn_info->mw_hardregs =
		    XRESIZEVEC (struct df_mw_hardreg *,
				insn_info->mw_hardregs,
				count + 1 + mw_len);
		}
	      memcpy (&insn_info->mw_hardregs[count],
		      VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
		      mw_len * sizeof (struct df_mw_hardreg *));
	      insn_info->mw_hardregs[count + mw_len] = NULL;
	      qsort (insn_info->mw_hardregs, count + mw_len,
		     sizeof (struct df_mw_hardreg *), df_mw_compare);
	    }
	  else
	    {
	      /* No vector there. */
	      insn_info->mw_hardregs
		= XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
	      memcpy (insn_info->mw_hardregs,
		      VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
		      mw_len * sizeof (struct df_mw_hardreg *));
	      insn_info->mw_hardregs[mw_len] = NULL;
	    }
	}
      /* Get rid of the mw_rec so that df_refs_add_to_chains will
	 ignore it.  */
      VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
      df_refs_add_to_chains (&collection_rec, bb, insn);
      VEC_free (df_ref, stack, collection_rec.eq_use_vec);
    }
  else
    df_insn_rescan (insn);

}


/*----------------------------------------------------------------------------
   Hard core instruction scanning code.  No external interfaces here,
   just a lot of routines that look inside insns.
----------------------------------------------------------------------------*/


H.J. Lu committed
2263
/* Return true if the contents of two df_ref's are identical.
   It ignores DF_REF_MARKER.  */

static bool
df_ref_equal_p (df_ref ref1, df_ref ref2)
{
  if (!ref2)
    return false;

  if (ref1 == ref2)
    return true;

  /* DF_REF_REG_MARKER and DF_REF_MW_HARDREG are masked out of the
     flag comparison: they do not affect ref identity.  */
  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
      || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
      || DF_REF_REG (ref1) != DF_REF_REG (ref2)
      || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
      || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
	  != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
      || DF_REF_BB (ref1) != DF_REF_BB (ref2)
      || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
    return false;

  switch (DF_REF_CLASS (ref1))
    {
    case DF_REF_ARTIFICIAL:
    case DF_REF_BASE:
      return true;

    /* Only regular refs carry a loc, so only they compare it.  */
    case DF_REF_REGULAR:
      return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);

    default:
      gcc_unreachable ();
    }
  /* Not reached; silences compilers that do not see gcc_unreachable
     as noreturn.  */
  return false;
}


/* Compare REF1 and REF2 for sorting.  This is only called from places
   where all of the refs are of the same type, in the same insn, and
   have the same bb.  So these fields are not checked.  */

static int
df_ref_compare (const void *r1, const void *r2)
{
  const df_ref ref1 = *(const df_ref *)r1;
  const df_ref ref2 = *(const df_ref *)r2;

  if (ref1 == ref2)
    return 0;

  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);

  /* Different REG rtxes with the same regno: fall back to creation
     order to keep the sort deterministic.  */
  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs.  */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them has is from
	 a mw and one is not, we need to have the one with the mw
	 first.  */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
	  DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
	return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
	return -1;
      else
	return 1;
    }

  /* All keys equal: order by creation so qsort is stable in effect.  */
  return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}

static void
2349
df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
2350
{
2351 2352 2353
  df_ref tmp = VEC_index (df_ref, *ref_vec, i);
  VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
  VEC_replace (df_ref, *ref_vec, j, tmp);
2354 2355 2356 2357
}

/* Sort and compress a set of refs.  */

static void
df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
{
  unsigned int count;
  unsigned int i;
  /* DIST counts duplicates found so far; survivors are shifted left
     by DIST positions.  */
  unsigned int dist = 0;

  count = VEC_length (df_ref, *ref_vec);

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
      df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
      if (df_ref_compare (&r0, &r1) > 0)
        df_swap_refs (ref_vec, 0, 1);
    }
  else
    {
      for (i = 0; i < count - 1; i++)
	{
	  df_ref r0 = VEC_index (df_ref, *ref_vec, i);
	  df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
	  if (df_ref_compare (&r0, &r1) >= 0)
	    break;
	}
      /* If the array is already strictly ordered,
         which is the most common case for large COUNT case
         (which happens for CALL INSNs),
         no need to sort and filter out duplicate.
         Simply return the count.
         Make sure DF_GET_ADD_REFS adds refs in the increasing order
         of DF_REF_COMPARE.  */
      if (i == count - 1)
        return;
      qsort (VEC_address (df_ref, *ref_vec), count, sizeof (df_ref),
	     df_ref_compare);
    }

  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
				VEC_index (df_ref, *ref_vec, i + dist + 1)))
	{
	  /* Duplicates are freed, not just dropped.  */
	  df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	VEC_replace (df_ref, *ref_vec, i + 1,
		     VEC_index (df_ref, *ref_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_ref, *ref_vec, count);
}


H.J. Lu committed
2420
/* Return true if the contents of two df_ref's are identical.
2421 2422 2423 2424 2425 2426 2427 2428 2429 2430 2431 2432 2433 2434 2435 2436 2437 2438 2439 2440 2441
   It ignores DF_REF_MARKER.  */

static bool
df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
{
  if (!mw2)
    return false;
  return (mw1 == mw2) ||
    (mw1->mw_reg == mw2->mw_reg
     && mw1->type == mw2->type
     && mw1->flags == mw2->flags
     && mw1->start_regno == mw2->start_regno
     && mw1->end_regno == mw2->end_regno);
}


/* Compare MW1 and MW2 for sorting.  */

static int
df_mw_compare (const void *m1, const void *m2)
{
  const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
  const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;

  if (mw1 == mw2)
    return 0;

  if (mw1->type != mw2->type)
    return mw1->type - mw2->type;

  if (mw1->flags != mw2->flags)
    return mw1->flags - mw2->flags;

  if (mw1->start_regno != mw2->start_regno)
    return mw1->start_regno - mw2->start_regno;

  if (mw1->end_regno != mw2->end_regno)
    return mw1->end_regno - mw2->end_regno;

  /* Same register span but different REG rtxes: fall back to the
     creation order to keep the sort deterministic.  */
  if (mw1->mw_reg != mw2->mw_reg)
    return mw1->mw_order - mw2->mw_order;

  return 0;
}


/* Sort and compress a set of multiword hardreg records, freeing
   duplicates back to the mw_reg pool.  */

static void
df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
{
  unsigned int count;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  /* DIST counts duplicates found so far; survivors are shifted left
     by DIST positions.  */
  unsigned int dist = 0;

  count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Hand-rolled swap for the common two-element case.  */
      struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
      struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
      if (df_mw_compare (&m0, &m1) > 0)
        {
          struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
						 *mw_vec, 0);
	  VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
		       VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
	  VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
        }
    }
  else
    qsort (VEC_address (df_mw_hardreg_ptr, *mw_vec), count,
	   sizeof (struct df_mw_hardreg *), df_mw_compare);

  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
			       VEC_index (df_mw_hardreg_ptr, *mw_vec,
					  i + dist + 1)))
	{
	  /* Duplicates go back to the allocation pool.  */
	  pool_free (problem_data->mw_reg_pool,
		     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
		     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
}


/* Sort and remove duplicates from the COLLECTION_REC.  */

static void
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
{
  df_sort_and_compress_refs (&collection_rec->def_vec);
  df_sort_and_compress_refs (&collection_rec->use_vec);
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
  df_sort_and_compress_mws (&collection_rec->mw_vec);
}


/* Add the new df_ref to appropriate reg_info/ref_info chains.  If
   ADD_TO_TABLE, the ref is also entered into REF_INFO's flat ref
   table and given an id; otherwise its id is -1.  */

static void
df_install_ref (df_ref this_ref,
		struct df_reg_info *reg_info,
		struct df_ref_info *ref_info,
		bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  /* A ref must not already be linked into a reg chain.  */
  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
		       && DF_REF_PREV_REG (this_ref) == NULL);

  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    DF_REF_ID (this_ref) = -1;

  /* total_size counts every installed ref, tabled or not.  */
  ref_info->total_size++;
}


/* This function takes one of the groups of refs (defs, uses or
   eq_uses) and installs the entire group into the insn.  It also adds
   each of these refs into the appropriate chains.  Returns a freshly
   allocated NULL-terminated array, or the shared empty record when
   OLD_VEC is empty.  */

static df_ref *
df_install_refs (basic_block bb,
		 VEC(df_ref,stack)* old_vec,
		 struct df_reg_info **reg_info,
		 struct df_ref_info *ref_info,
		 bool is_notes)
{
  unsigned int count;

  count = VEC_length (df_ref, old_vec);
  if (count)
    {
      df_ref *new_vec = XNEWVEC (df_ref, count + 1);
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Installing unsorted refs degrades any BY_REG/BY_INSN ordering
	 to the corresponding UNORDERED state.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	  add_to_table = true;
	  break;
	case DF_REF_ORDER_UNORDERED:
	case DF_REF_ORDER_BY_REG:
	case DF_REF_ORDER_BY_INSN:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	  add_to_table = !is_notes;
	  break;
	default:
	  add_to_table = false;
	  break;
	}

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      FOR_EACH_VEC_ELT (df_ref, old_vec, ix, this_ref)
	{
	  new_vec[ix] = this_ref;
	  df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
			  ref_info, add_to_table);
	}

      new_vec[count] = NULL;
      return new_vec;
    }
  else
    return df_null_ref_rec;
}


/* This function takes the mws installs the entire group into the
   insn.  */

static struct df_mw_hardreg **
2644
df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
2645
{
2646 2647 2648
  unsigned int count;

  count = VEC_length (df_mw_hardreg_ptr, old_vec);
2649 2650
  if (count)
    {
H.J. Lu committed
2651
      struct df_mw_hardreg **new_vec
2652
	= XNEWVEC (struct df_mw_hardreg*, count + 1);
H.J. Lu committed
2653
      memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
2654 2655
	      sizeof (struct df_mw_hardreg*) * count);
      new_vec[count] = NULL;
2656 2657 2658 2659 2660 2661 2662 2663 2664 2665 2666
      return new_vec;
    }
  else
    return df_null_mw_rec;
}


/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
   chains and update other necessary information.  With an INSN the
   refs go into its insn record; with a NULL INSN they become the
   artificial refs of BB.  */

static void
df_refs_add_to_chains (struct df_collection_rec *collection_rec,
		       basic_block bb, rtx insn)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
	 insn.  A null rec is a signal that the caller will handle the
	 chain specially.  */
      if (collection_rec->def_vec)
	{
	  df_scan_free_ref_vec (insn_rec->defs);
	  insn_rec->defs
	    = df_install_refs (bb, collection_rec->def_vec,
			       df->def_regs,
			       &df->def_info, false);
	}
      if (collection_rec->use_vec)
	{
	  df_scan_free_ref_vec (insn_rec->uses);
	  insn_rec->uses
	    = df_install_refs (bb, collection_rec->use_vec,
			       df->use_regs,
			       &df->use_info, false);
	}
      if (collection_rec->eq_use_vec)
	{
	  df_scan_free_ref_vec (insn_rec->eq_uses);
	  /* eq_uses share use_info with plain uses but are tagged as
	     notes (is_notes == true).  */
	  insn_rec->eq_uses
	    = df_install_refs (bb, collection_rec->eq_use_vec,
			       df->eq_use_regs,
			       &df->use_info, true);
	}
      if (collection_rec->mw_vec)
	{
	  df_scan_free_mws_vec (insn_rec->mw_hardregs);
	  insn_rec->mw_hardregs
	    = df_install_mws (collection_rec->mw_vec);
	}
    }
  else
    {
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      df_scan_free_ref_vec (bb_info->artificial_defs);
      bb_info->artificial_defs
	= df_install_refs (bb, collection_rec->def_vec,
			   df->def_regs,
			   &df->def_info, false);
      df_scan_free_ref_vec (bb_info->artificial_uses);
      bb_info->artificial_uses
	= df_install_refs (bb, collection_rec->use_vec,
			   df->use_regs,
			   &df->use_info, false);
    }
}
2723 2724


2725
/* Allocate a ref of class CL from the matching pool and initialize
   its fields.  If COLLECTION_REC is non-NULL, the new ref is also
   pushed onto the appropriate (def/use/eq_use) collection vector.  */

static df_ref
df_ref_create_structure (enum df_ref_class cl,
			 struct df_collection_rec *collection_rec,
			 rtx reg, rtx *loc,
			 basic_block bb, struct df_insn_info *info,
			 enum df_ref_type ref_type,
			 int ref_flags)
{
  df_ref this_ref = NULL;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Each class has its own pool; only regular refs carry a loc and
     only artificial refs store their bb directly.  */
  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
      this_ref->artificial_ref.bb = bb;
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
      this_ref->regular_ref.loc = loc;
      gcc_checking_assert (loc);
      break;
    }

  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) =  regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations sometimes create new refs using old
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if ((regno < FIRST_PSEUDO_REGISTER)
      && (!DF_REF_IS_ARTIFICIAL (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
	{
	  if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
	    DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
	}
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
		 && (regno == FRAME_POINTER_REGNUM
		     || regno == ARG_POINTER_REGNUM)))
	DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  if (collection_rec)
    {
      if (DF_REF_REG_DEF_P (this_ref))
	VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
	VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
      else
	VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
    }

  return this_ref;
}


/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  A hard register spanning several
   machine registers gets one ref per covered regno plus a df_mw_hardreg
   record; a pseudo gets a single ref.  */


static void
df_ref_record (enum df_ref_class cl,
	       struct df_collection_rec *collection_rec,
               rtx reg, rtx *loc,
	       basic_block bb, struct df_insn_info *insn_info,
	       enum df_ref_type ref_type,
	       int ref_flags)
{
  unsigned int regno;

  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      /* For a SUBREG, narrow [regno, endregno) to the registers the
	 subreg actually touches.  */
      if (GET_CODE (reg) == SUBREG)
	{
	  regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
					SUBREG_BYTE (reg), GET_MODE (reg));
	  endregno = regno + subreg_nregs (reg);
	}
      else
	endregno = END_HARD_REGNO (reg);

      /*  If this is a multiword hardreg, we create some extra
	  datastructures that will enable us to easily build REG_DEAD
	  and REG_UNUSED notes.  */
      if ((endregno != regno + 1) && insn_info)
	{
	  /* Sets to a subreg of a multiword register are partial.
	     Sets to a non-subreg of a multiword register are not.  */
	  if (GET_CODE (reg) == SUBREG)
	    ref_flags |= DF_REF_PARTIAL;
	  ref_flags |= DF_REF_MW_HARDREG;

	  hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
	  hardreg->type = ref_type;
	  hardreg->flags = ref_flags;
	  hardreg->mw_reg = reg;
	  hardreg->start_regno = regno;
	  hardreg->end_regno = endregno - 1;
	  hardreg->mw_order = df->ref_order++;
	  VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
			 hardreg);
	}

      /* One ref per covered hard register, each on its canonical REG
	 rtx.  */
      for (i = regno; i < endregno; i++)
	{
	  ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
					 bb, insn_info, ref_type, ref_flags);

          gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
	}
    }
  else
    {
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
			       ref_type, ref_flags);
    }
}


/* A set to a non-paradoxical SUBREG for which the number of word_mode units
   covered by the outer mode is smaller than that covered by the inner mode,
   is a read-modify-write operation.
   This function returns true iff the SUBREG X is such a SUBREG.  */

bool
df_read_modify_subreg_p (rtx x)
{
  unsigned int inner_size, outer_size;

  if (GET_CODE (x) != SUBREG)
    return false;

  inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  outer_size = GET_MODE_SIZE (GET_MODE (x));

  /* Paradoxical (or same-size) subregs are not read-modify-write.  */
  if (outer_size >= inner_size)
    return false;

  return inner_size > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
}


/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.  */

static void
2902
df_def_record_1 (struct df_collection_rec *collection_rec,
2903
                 rtx x, basic_block bb, struct df_insn_info *insn_info,
2904
		 int flags)
2905 2906 2907 2908 2909 2910 2911 2912 2913 2914 2915 2916
{
  rtx *loc;
  rtx dst;

 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
     construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

2917 2918
  /* It is legal to have a set destination be a parallel. */
  if (GET_CODE (dst) == PARALLEL)
2919 2920 2921 2922 2923 2924 2925 2926
    {
      int i;

      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
	      || GET_CODE (temp) == SET)
2927
	    df_def_record_1 (collection_rec,
H.J. Lu committed
2928 2929
                             temp, bb, insn_info,
			     GET_CODE (temp) == CLOBBER
2930
			     ? flags | DF_REF_MUST_CLOBBER : flags);
2931 2932 2933 2934
	}
      return;
    }

2935
  if (GET_CODE (dst) == STRICT_LOW_PART)
2936
    {
2937 2938 2939 2940 2941 2942 2943 2944 2945
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  if (GET_CODE (dst) == ZERO_EXTRACT)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
H.J. Lu committed
2946

2947 2948 2949 2950
      loc = &XEXP (dst, 0);
      dst = *loc;
    }

2951 2952 2953
  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst))
    {
2954 2955
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2956 2957 2958 2959

      /* We want to keep sp alive everywhere - by making all
	 writes to sp also use of sp. */
      if (REGNO (dst) == STACK_POINTER_REGNUM)
2960
	df_ref_record (DF_REF_BASE, collection_rec,
2961
		       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
2962 2963 2964 2965 2966
    }
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
    {
      if (df_read_modify_subreg_p (dst))
	flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2967

2968
      flags |= DF_REF_SUBREG;
2969

2970 2971
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2972
    }
2973 2974 2975 2976 2977 2978
}


/* Process all the registers defined in the pattern rtx, X.  */

static void
H.J. Lu committed
2979
df_defs_record (struct df_collection_rec *collection_rec,
2980
                rtx x, basic_block bb, struct df_insn_info *insn_info,
2981
		int flags)
2982 2983 2984 2985 2986 2987
{
  RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    {
      /* Mark the single def within the pattern.  */
2988
      int clobber_flags = flags;
2989
      clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2990
      df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
2991 2992 2993
    }
  else if (code == COND_EXEC)
    {
H.J. Lu committed
2994
      df_defs_record (collection_rec, COND_EXEC_CODE (x),
2995
		      bb, insn_info, DF_REF_CONDITIONAL);
2996 2997 2998 2999 3000 3001 3002
    }
  else if (code == PARALLEL)
    {
      int i;

      /* Mark the multiple defs within the pattern.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3003
	df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
3004 3005 3006 3007
    }
}


3008
/* Process all the registers used in the rtx at address LOC.  */
3009 3010

static void
3011
df_uses_record (struct df_collection_rec *collection_rec,
3012
                rtx *loc, enum df_ref_type ref_type,
3013
		basic_block bb, struct df_insn_info *insn_info,
3014
		int flags)
3015 3016 3017
{
  RTX_CODE code;
  rtx x;
3018

3019 3020 3021 3022 3023 3024 3025 3026 3027 3028 3029 3030
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
3031
    case CONST_FIXED:
3032 3033 3034 3035 3036 3037 3038 3039 3040 3041 3042
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
3043
	df_uses_record (collection_rec,
3044
			&XEXP (XEXP (x, 0), 0),
3045 3046
			DF_REF_REG_MEM_STORE,
		        bb, insn_info,
3047
			flags);
3048 3049 3050 3051 3052

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
3053
      df_uses_record (collection_rec,
H.J. Lu committed
3054
		      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
3055
		      bb, insn_info, flags & DF_REF_IN_NOTE);
3056 3057 3058 3059
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
3060
      flags |= DF_REF_PARTIAL;
3061 3062 3063 3064
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
3065
	  df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
3066 3067 3068 3069 3070
	  return;
	}
      /* ... Fall through ...  */

    case REG:
3071
      df_ref_record (DF_REF_REGULAR, collection_rec,
3072
		     x, loc, bb, insn_info,
3073
		     ref_type, flags);
3074 3075
      return;

3076 3077 3078
    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
3079 3080 3081 3082 3083 3084 3085 3086 3087 3088 3089 3090 3091 3092 3093 3094
        df_uses_record (collection_rec,
                        &XEXP (x, 1), ref_type, bb, insn_info, flags);
        df_uses_record (collection_rec,
                        &XEXP (x, 2), ref_type, bb, insn_info, flags);

        /* If the parameters to the zero or sign extract are
           constants, strip them off and recurse, otherwise there is
           no information that we can gain from this operation.  */
        if (code == ZERO_EXTRACT)
          flags |= DF_REF_ZERO_EXTRACT;
        else
          flags |= DF_REF_SIGN_EXTRACT;

        df_uses_record (collection_rec,
                        &XEXP (x, 0), ref_type, bb, insn_info, flags);
        return;
3095 3096 3097
      }
      break;

3098 3099 3100 3101
    case SET:
      {
	rtx dst = SET_DEST (x);
	gcc_assert (!(flags & DF_REF_IN_NOTE));
3102 3103
	df_uses_record (collection_rec,
			&SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);
3104 3105 3106 3107 3108 3109

	switch (GET_CODE (dst))
	  {
	    case SUBREG:
	      if (df_read_modify_subreg_p (dst))
		{
3110
		  df_uses_record (collection_rec, &SUBREG_REG (dst),
H.J. Lu committed
3111
				  DF_REF_REG_USE, bb, insn_info,
3112
				  flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
3113 3114 3115 3116 3117 3118 3119 3120 3121 3122
		  break;
		}
	      /* Fall through.  */
	    case REG:
	    case PARALLEL:
	    case SCRATCH:
	    case PC:
	    case CC0:
		break;
	    case MEM:
3123 3124
	      df_uses_record (collection_rec, &XEXP (dst, 0),
			      DF_REF_REG_MEM_STORE, bb, insn_info, flags);
3125 3126 3127 3128 3129 3130 3131
	      break;
	    case STRICT_LOW_PART:
	      {
		rtx *temp = &XEXP (dst, 0);
		/* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
		dst = XEXP (dst, 0);
3132
		df_uses_record (collection_rec,
H.J. Lu committed
3133
				(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3134
				DF_REF_REG_USE, bb, insn_info,
3135
				DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
3136 3137 3138
	      }
	      break;
	    case ZERO_EXTRACT:
3139
	      {
3140 3141 3142 3143 3144 3145 3146 3147 3148 3149 3150 3151
		df_uses_record (collection_rec, &XEXP (dst, 1),
				DF_REF_REG_USE, bb, insn_info, flags);
		df_uses_record (collection_rec, &XEXP (dst, 2),
				DF_REF_REG_USE, bb, insn_info, flags);
                if (GET_CODE (XEXP (dst,0)) == MEM)
                  df_uses_record (collection_rec, &XEXP (dst, 0),
                                  DF_REF_REG_USE, bb, insn_info,
                                  flags);
                else
                  df_uses_record (collection_rec, &XEXP (dst, 0),
                                  DF_REF_REG_USE, bb, insn_info,
                                  DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
3152
	      }
3153
	      break;
3154

3155 3156 3157 3158 3159 3160 3161 3162 3163 3164 3165 3166 3167 3168 3169 3170 3171 3172 3173 3174 3175 3176 3177 3178 3179 3180 3181 3182 3183 3184 3185 3186
	    default:
	      gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
3187 3188
	   can find in ASM_OPERANDS as used.  In global asm insns are
	   scanned and regs_asm_clobbered is filled out.
3189 3190 3191 3192 3193 3194 3195 3196 3197 3198 3199

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We can not just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3200 3201
	      df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn_info, flags);
3202 3203 3204 3205 3206
	    return;
	  }
	break;
      }

3207
    case VAR_LOCATION:
3208
      df_uses_record (collection_rec,
3209
		      &PAT_VAR_LOCATION_LOC (x),
3210
		      DF_REF_REG_USE, bb, insn_info, flags);
3211 3212
      return;

3213 3214 3215 3216 3217 3218
    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
3219
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
3220
      /* Catch the def of the register being modified.  */
3221
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
H.J. Lu committed
3222
		     bb, insn_info,
3223
		     DF_REF_REG_DEF,
3224
                     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
3225 3226 3227 3228 3229 3230 3231 3232 3233 3234 3235 3236 3237 3238 3239 3240 3241 3242 3243 3244 3245 3246

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
3247 3248
	    df_uses_record (collection_rec, &XEXP (x, i), ref_type,
			    bb, insn_info, flags);
3249 3250 3251 3252 3253
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
3254
	      df_uses_record (collection_rec,
H.J. Lu committed
3255
			      &XVECEXP (x, i, j), ref_type,
3256
			      bb, insn_info, flags);
3257 3258 3259 3260
	  }
      }
  }

3261
  return;
3262 3263 3264
}


3265
/* For all DF_REF_CONDITIONAL defs, add a corresponding uses.  */
3266

3267 3268
static void
df_get_conditional_uses (struct df_collection_rec *collection_rec)
3269
{
3270 3271 3272
  unsigned int ix;
  df_ref ref;

3273
  FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
3274 3275 3276
    {
      if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
        {
3277
          df_ref use;
3278

3279
          use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3280
					 DF_REF_LOC (ref), DF_REF_BB (ref),
3281
					 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3282
					 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3283 3284 3285
          DF_REF_REGNO (use) = DF_REF_REGNO (ref);
        }
    }
3286 3287 3288
}


3289
/* Get call's extra defs and uses. */
3290 3291

static void
3292
df_get_call_refs (struct df_collection_rec * collection_rec,
H.J. Lu committed
3293
                  basic_block bb,
3294
                  struct df_insn_info *insn_info,
3295
                  int flags)
3296
{
3297 3298 3299 3300 3301
  rtx note;
  bitmap_iterator bi;
  unsigned int ui;
  bool is_sibling_call;
  unsigned int i;
3302
  df_ref def;
3303 3304 3305
  bitmap_head defs_generated;

  bitmap_initialize (&defs_generated, &df_bitmap_obstack);
3306

3307 3308 3309
  /* Do not generate clobbers for registers that are the result of the
     call.  This causes ordering problems in the chain building code
     depending on which def is seen first.  */
3310
  FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, i, def)
3311
    bitmap_set_bit (&defs_generated, DF_REF_REGNO (def));
3312

3313 3314
  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered.  */
3315
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3316 3317 3318
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
3319 3320
        df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
			DF_REF_REG_USE, bb, insn_info, flags);
3321 3322
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	{
3323 3324 3325
	  if (REG_P (XEXP (XEXP (note, 0), 0)))
	    {
	      unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3326
	      if (!bitmap_bit_p (&defs_generated, regno))
3327
		df_defs_record (collection_rec, XEXP (note, 0), bb,
3328
				insn_info, flags);
3329 3330
	    }
	  else
3331 3332
	    df_uses_record (collection_rec, &XEXP (note, 0),
		            DF_REF_REG_USE, bb, insn_info, flags);
3333 3334
	}
    }
3335

3336
  /* The stack ptr is used (honorarily) by a CALL insn.  */
3337
  df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3338
		 NULL, bb, insn_info, DF_REF_REG_USE,
3339
		 DF_REF_CALL_STACK_USAGE | flags);
3340

3341 3342 3343 3344
  /* Calls may also reference any of the global registers,
     so they are recorded as used.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i])
3345
      {
3346
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3347
		       NULL, bb, insn_info, DF_REF_REG_USE, flags);
3348
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3349
		       NULL, bb, insn_info, DF_REF_REG_DEF, flags);
3350
      }
3351

3352
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3353
  EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
3354
    {
3355
      if (!global_regs[ui]
3356
	  && (!bitmap_bit_p (&defs_generated, ui))
3357 3358
	  && (!is_sibling_call
	      || !bitmap_bit_p (df->exit_block_uses, ui)
H.J. Lu committed
3359
	      || refers_to_regno_p (ui, ui+1,
3360
				    crtl->return_rtx, NULL)))
H.J. Lu committed
3361
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
3362
		       NULL, bb, insn_info, DF_REF_REG_DEF,
3363
		       DF_REF_MAY_CLOBBER | flags);
3364 3365
    }

3366
  bitmap_clear (&defs_generated);
3367 3368
  return;
}
3369

3370 3371 3372 3373
/* Collect all refs in the INSN. This function is free of any
   side-effect - it will create and return a lists of df_ref's in the
   COLLECTION_REC without putting those refs into existing ref chains
   and reg chains. */
3374

3375
static void
H.J. Lu committed
3376 3377
df_insn_refs_collect (struct df_collection_rec* collection_rec,
		      basic_block bb, struct df_insn_info *insn_info)
3378 3379
{
  rtx note;
3380
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3381

3382
  /* Clear out the collection record.  */
3383 3384 3385 3386
  VEC_truncate (df_ref, collection_rec->def_vec, 0);
  VEC_truncate (df_ref, collection_rec->use_vec, 0);
  VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
  VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3387

3388
  /* Record register defs.  */
3389
  df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
3390

3391 3392
  /* Process REG_EQUIV/REG_EQUAL notes.  */
  for (note = REG_NOTES (insn_info->insn); note;
3393 3394 3395 3396 3397 3398
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EQUIV:
        case REG_EQUAL:
3399
          df_uses_record (collection_rec,
3400
                          &XEXP (note, 0), DF_REF_REG_USE,
3401
                          bb, insn_info, DF_REF_IN_NOTE);
3402 3403 3404
          break;
        case REG_NON_LOCAL_GOTO:
          /* The frame ptr is used by a non-local goto.  */
3405
          df_ref_record (DF_REF_BASE, collection_rec,
3406
                         regno_reg_rtx[FRAME_POINTER_REGNUM],
3407
                         NULL, bb, insn_info,
3408
                         DF_REF_REG_USE, 0);
3409
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3410
          df_ref_record (DF_REF_BASE, collection_rec,
3411
                         regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3412
                         NULL, bb, insn_info,
3413
                         DF_REF_REG_USE, 0);
3414 3415 3416 3417 3418
#endif
          break;
        default:
          break;
        }
3419
    }
3420

3421
  if (CALL_P (insn_info->insn))
H.J. Lu committed
3422
    df_get_call_refs (collection_rec, bb, insn_info,
3423 3424 3425
		      (is_cond_exec) ? DF_REF_CONDITIONAL : 0);

  /* Record the register uses.  */
3426 3427
  df_uses_record (collection_rec,
		  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
3428 3429 3430 3431 3432 3433

  /* DF_REF_CONDITIONAL needs corresponding USES. */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  df_canonize_collection_rec (collection_rec);
3434 3435
}

3436 3437 3438 3439
/* Recompute the luids for the insns in BB.  */

void
df_recompute_luids (basic_block bb)
{
  rtx insn;
  int next_luid = 0;

  df_grow_insn_info ();

  /* Walk the block from beginning to end, renumbering as we go.  */
  FOR_BB_INSNS (bb, insn)
    {
      struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

      /* Inserting labels does not always trigger the incremental
	 rescanning, so create a record on the fly for such insns.  */
      if (!insn_info)
	{
	  gcc_assert (!INSN_P (insn));
	  insn_info = df_insn_create_insn_record (insn);
	}

      DF_INSN_INFO_LUID (insn_info) = next_luid;
      if (INSN_P (insn))
	next_luid++;
    }
}


/* Collect all artificial refs at the block level for BB and add them
   to COLLECTION_REC.  */

static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
  /* Start from empty scratch vectors.  */
  VEC_truncate (df_ref, collection_rec->def_vec, 0);
  VEC_truncate (df_ref, collection_rec->use_vec, 0);
  VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
  VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);

  /* The entry and exit blocks carry only their special refs.  */
  if (bb->index == ENTRY_BLOCK)
    {
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
      return;
    }
  if (bb->index == EXIT_BLOCK)
    {
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
      return;
    }

#ifdef EH_RETURN_DATA_REGNO
  if (bb_has_eh_pred (bb))
    {
      unsigned int i;

      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
	{
	  unsigned regno = EH_RETURN_DATA_REGNO (i);

	  if (regno == INVALID_REGNUM)
	    break;
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec,
			 regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
	}
    }
#endif

  /* Add the hard_frame_pointer if this block is the target of a
     non-local goto.  */
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec,
		   hard_frame_pointer_rtx, NULL,
		   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);

  /* Add the artificial uses for ordinary blocks.  */
  if (bb->index >= NUM_FIXED_BLOCKS)
    {
      bitmap_iterator bi;
      unsigned int regno;
      bitmap au = bb_has_eh_pred (bb)
	? &df->eh_block_artificial_uses
	: &df->regular_block_artificial_uses;

      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
	{
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec,
			 regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_USE, 0);
	}
    }

  df_canonize_collection_rec (collection_rec);
}

3528

3529 3530 3531 3532
/* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS.  */

void
df_bb_refs_record (int bb_index, bool scan_insns)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  rtx insn;
  int next_luid = 0;
  struct df_collection_rec collection_rec;

  if (!df)
    return;

  df_grow_bb_info (df_scan);

  /* Scratch vectors, reused for every insn in the block.  */
  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  if (scan_insns)
    /* Scan the block an insn at a time from beginning to end.  */
    FOR_BB_INSNS (bb, insn)
      {
	struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
	gcc_assert (!insn_info);

	insn_info = df_insn_create_insn_record (insn);
	if (INSN_P (insn))
	  {
	    /* Record refs within INSN.  */
	    DF_INSN_INFO_LUID (insn_info) = next_luid++;
	    df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
	    df_refs_add_to_chains (&collection_rec, bb, insn);
	  }
	DF_INSN_INFO_LUID (insn_info) = next_luid;
      }

  /* Other block level artificial refs.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_add_to_chains (&collection_rec, bb, NULL);

  VEC_free (df_ref, stack, collection_rec.def_vec);
  VEC_free (df_ref, stack, collection_rec.use_vec);
  VEC_free (df_ref, stack, collection_rec.eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);

  /* Now that the block has been processed, set the block as dirty so
     LR and LIVE will get it processed.  */
  df_set_bb_dirty (bb);
}
3579

3580 3581 3582

/* Get the artificial use set for a regular (i.e. non-exit/non-entry)
   block. */
3583 3584

static void
3585
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3586
{
3587 3588 3589 3590
#ifdef EH_USES
  unsigned int i;
#endif

3591
  bitmap_clear (regular_block_artificial_uses);
3592

3593
  if (reload_completed)
3594
    {
3595 3596 3597 3598 3599 3600 3601 3602 3603 3604 3605
      if (frame_pointer_needed)
	bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
H.J. Lu committed
3606

3607 3608 3609 3610 3611 3612 3613 3614 3615 3616
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
#endif
H.J. Lu committed
3617

3618 3619 3620 3621 3622
      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3623
    }
3624 3625
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3626 3627 3628 3629 3630 3631 3632 3633 3634 3635 3636 3637 3638 3639

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
	trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
	initialization and where they the saved registers are restored
	from them, including the cases where we don't reach the epilogue
	(noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
3640 3641
}

3642

3643
/* Get the artificial use set for an eh block. */
3644

3645 3646 3647 3648
static void
df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
{
  bitmap_clear (eh_block_artificial_uses);

  /* The following code (down thru the arg_pointer setting APPEARS
     to be necessary because there is nothing that actually
     describes what the exception handling code may actually need
     to keep alive.  */
  if (reload_completed)
    {
      if (frame_pointer_needed)
	{
	  bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	  bitmap_set_bit (eh_block_artificial_uses,
			  HARD_FRAME_POINTER_REGNUM);
#endif
	}
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
#endif
    }
}


3671

3672 3673 3674 3675
/*----------------------------------------------------------------------------
   Specialized hard register scanning functions.
----------------------------------------------------------------------------*/

3676

3677 3678 3679 3680 3681 3682 3683 3684 3685 3686 3687 3688 3689 3690
/* Mark a register in SET.  Hard registers in large modes get all
   of their component registers set as well.  */

static void
df_mark_reg (rtx reg, void *vset)
{
  bitmap set = (bitmap) vset;
  int regno = REGNO (reg);

  gcc_assert (GET_MODE (reg) != BLKmode);

  if (regno < FIRST_PSEUDO_REGISTER)
    /* A hard register in a wide mode occupies several consecutive
       hard registers; mark them all.  */
    bitmap_set_range (set, regno,
		      hard_regno_nregs[regno][GET_MODE (reg)]);
  else
    bitmap_set_bit (set, regno);
}

3697

3698
/* Set the bit for regs that are considered being defined at the entry. */
3699 3700

static void
3701
df_get_entry_block_def_set (bitmap entry_block_defs)
3702 3703
{
  rtx r;
3704
  int i;
3705

3706
  bitmap_clear (entry_block_defs);
3707 3708 3709 3710 3711

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (FUNCTION_ARG_REGNO_P (i))
#ifdef INCOMING_REGNO
3712
	bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3713
#else
3714
	bitmap_set_bit (entry_block_defs, i);
3715 3716
#endif
    }
H.J. Lu committed
3717

3718 3719 3720
  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

3721 3722 3723 3724 3725 3726 3727
  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
	 pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3728 3729
	if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
	  bitmap_set_bit (entry_block_defs, i);
3730 3731
    }

3732 3733 3734 3735
  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

3736 3737 3738 3739 3740 3741
  /* If the function has an incoming STATIC_CHAIN, it has to show up
     in the entry def set.  */
  r = targetm.calls.static_chain (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

3742
  if ((!reload_completed) || frame_pointer_needed)
3743 3744 3745
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
3746
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3747 3748 3749
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3750
	bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3751 3752
#endif
    }
3753

3754 3755 3756
  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
3757 3758 3759 3760
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
3761
	bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3762
#endif
H.J. Lu committed
3763

3764 3765 3766 3767 3768
#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3769 3770 3771 3772 3773 3774 3775 3776
	bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
#endif
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif
H.J. Lu committed
3777

3778
  targetm.extra_live_on_entry (entry_block_defs);
3779 3780 3781 3782
}


/* Return the (conservative) set of hard registers that are defined on
H.J. Lu committed
3783 3784
   entry to the function.
   It uses df->entry_block_defs to determine which register
3785 3786 3787
   reference to include.  */

static void
H.J. Lu committed
3788
df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3789 3790
			     bitmap entry_block_defs)
{
H.J. Lu committed
3791
  unsigned int i;
3792 3793 3794 3795
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
    {
H.J. Lu committed
3796
      df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3797
		     ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3798 3799 3800 3801 3802 3803 3804 3805 3806 3807 3808 3809 3810 3811
    }

  df_canonize_collection_rec (collection_rec);
}


/* Record the (conservative) set of hard registers that are defined on
   entry to the function.  */

static void
df_record_entry_block_defs (bitmap entry_block_defs)
{
  struct df_collection_rec collection_rec;
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
  df_entry_block_defs_collect (&collection_rec, entry_block_defs);

  /* Process bb_refs chain */
  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
  VEC_free (df_ref, stack, collection_rec.def_vec);
}


3821
/* Update the defs in the entry block.  */
3822 3823 3824 3825

void
df_update_entry_block_defs (void)
{
3826
  bitmap_head refs;
3827
  bool changed = false;
3828

3829 3830
  bitmap_initialize (&refs, &df_bitmap_obstack);
  df_get_entry_block_def_set (&refs);
3831 3832
  if (df->entry_block_defs)
    {
3833
      if (!bitmap_equal_p (df->entry_block_defs, &refs))
3834 3835 3836 3837 3838 3839 3840 3841 3842 3843 3844 3845
	{
	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
	  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
	  df_ref_chain_delete (bb_info->artificial_defs);
	  bb_info->artificial_defs = NULL;
	  changed = true;
	}
    }
  else
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
3846
	gcc_unreachable ();
3847 3848 3849
      df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }
3850

3851
  if (changed)
3852
    {
3853 3854
      df_record_entry_block_defs (&refs);
      bitmap_copy (df->entry_block_defs, &refs);
3855
      df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3856
    }
3857
  bitmap_clear (&refs);
3858 3859 3860
}


3861
/* Set the bit for regs that are considered being used at the exit. */
3862 3863

static void
3864
df_get_exit_block_use_set (bitmap exit_block_uses)
3865
{
H.J. Lu committed
3866
  unsigned int i;
3867

3868
  bitmap_clear (exit_block_uses);
3869 3870 3871

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
H.J. Lu committed
3872

3873 3874 3875
  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */
H.J. Lu committed
3876

3877
  if ((!reload_completed) || frame_pointer_needed)
3878
    {
3879
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3880 3881
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
3882
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3883
	bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3884 3885 3886 3887 3888 3889
#endif
    }

  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
3890 3891
  if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      && (unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3892
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3893
    bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
H.J. Lu committed
3894

3895 3896 3897 3898 3899
  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
3900
      bitmap_set_bit (exit_block_uses, i);
H.J. Lu committed
3901

3902 3903 3904 3905
  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3906
	if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3907
	    && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3908
	  bitmap_set_bit (exit_block_uses, i);
3909
    }
H.J. Lu committed
3910

3911 3912
#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
3913
  if (reload_completed && crtl->calls_eh_return)
3914 3915 3916 3917 3918
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO (i);
	if (regno == INVALID_REGNUM)
	  break;
3919
	bitmap_set_bit (exit_block_uses, regno);
3920 3921 3922 3923
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
3924
  if ((!HAVE_epilogue || ! epilogue_completed)
3925
      && crtl->calls_eh_return)
3926 3927 3928
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
3929
	df_mark_reg (tmp, exit_block_uses);
3930 3931 3932 3933
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
3934
  if ((!HAVE_epilogue || ! epilogue_completed)
3935
      && crtl->calls_eh_return)
3936 3937 3938
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
3939
	df_mark_reg (tmp, exit_block_uses);
3940
    }
H.J. Lu committed
3941
#endif
3942

3943
  /* Mark function return value.  */
3944 3945 3946 3947
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}


H.J. Lu committed
3948
/* Return the refs of hard registers that are used in the exit block.
3949 3950 3951 3952 3953
   It uses df->exit_block_uses to determine register to include.  */

static void
df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
{
H.J. Lu committed
3954
  unsigned int i;
3955 3956 3957
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3958
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3959
		   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3960

3961 3962 3963
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
  /* It is deliberate that this is not put in the exit block uses but
     I do not know why.  */
H.J. Lu committed
3964
  if (reload_completed
3965
      && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3966
      && bb_has_eh_pred (EXIT_BLOCK_PTR)
3967
      && fixed_regs[ARG_POINTER_REGNUM])
3968
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3969
		   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3970 3971 3972 3973 3974 3975
#endif

  df_canonize_collection_rec (collection_rec);
}


H.J. Lu committed
3976
/* Record the set of hard registers that are used in the exit block.
3977 3978 3979 3980 3981 3982 3983
   It uses df->exit_block_uses to determine which bit to include.  */

static void
df_record_exit_block_uses (bitmap exit_block_uses)
{
  struct df_collection_rec collection_rec;
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3984
  collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3985 3986 3987 3988 3989

  df_exit_block_uses_collect (&collection_rec, exit_block_uses);

  /* Process bb_refs chain */
  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3990
  VEC_free (df_ref, stack, collection_rec.use_vec);
3991 3992 3993 3994 3995 3996 3997 3998
}


/* Update the uses in the exit block.  */

void
df_update_exit_block_uses (void)
{
3999
  bitmap_head refs;
4000 4001
  bool changed = false;

4002 4003
  bitmap_initialize (&refs, &df_bitmap_obstack);
  df_get_exit_block_use_set (&refs);
4004 4005
  if (df->exit_block_uses)
    {
4006
      if (!bitmap_equal_p (df->exit_block_uses, &refs))
4007 4008 4009 4010 4011 4012 4013 4014 4015 4016 4017 4018
	{
	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
	  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
	  df_ref_chain_delete (bb_info->artificial_uses);
	  bb_info->artificial_uses = NULL;
	  changed = true;
	}
    }
  else
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
4019
	gcc_unreachable ();
4020 4021 4022 4023 4024 4025
      df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
4026 4027
      df_record_exit_block_uses (&refs);
      bitmap_copy (df->exit_block_uses,& refs);
4028 4029
      df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
    }
4030
  bitmap_clear (&refs);
4031 4032 4033 4034
}

/* Set once df_hard_reg_init has done its one-time setup; subsequent
   calls return immediately.  */
static bool initialized = false;

4035

4036 4037
/* Initialize some platform specific structures.  */

H.J. Lu committed
4038
void
4039 4040
df_hard_reg_init (void)
{
4041
#ifdef ELIMINABLE_REGS
4042
  int i;
4043 4044 4045 4046 4047 4048 4049 4050
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  if (initialized)
    return;

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);
H.J. Lu committed
4051

4052 4053 4054 4055 4056 4057
#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif
H.J. Lu committed
4058

4059 4060
  initialized = true;
}
4061 4062 4063


/* Recompute the parts of scanning that are based on regs_ever_live
H.J. Lu committed
4064
   because something changed in that array.  */
4065

H.J. Lu committed
4066
void
4067 4068 4069 4070 4071 4072 4073 4074 4075
df_update_entry_exit_and_calls (void)
{
  basic_block bb;

  df_update_entry_block_defs ();
  df_update_exit_block_uses ();

  /* The call insns need to be rescanned because there may be changes
     in the set of registers clobbered across the call.  */
H.J. Lu committed
4076
  FOR_EACH_BB (bb)
4077 4078 4079 4080 4081 4082 4083 4084 4085 4086 4087 4088 4089 4090 4091 4092
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn) && CALL_P (insn))
	    df_insn_rescan (insn);
	}
    }
}


/* Return true if hard REG is actually used in the some instruction.
   There are a fair number of conditions that affect the setting of
   this array.  See the comment in df.h for df->hard_regs_live_count
   for the conditions that this array is set. */

H.J. Lu committed
4093
bool
4094 4095 4096 4097 4098 4099 4100 4101 4102 4103 4104 4105 4106 4107 4108 4109 4110 4111 4112 4113 4114 4115
df_hard_reg_used_p (unsigned int reg)
{
  return df->hard_regs_live_count[reg] != 0;
}


/* A count of the number of times REG is actually used in the some
   instruction.  There are a fair number of conditions that affect the
   setting of this array.  See the comment in df.h for
   df->hard_regs_live_count for the conditions that this array is
   set. */


unsigned int
df_hard_reg_used_count (unsigned int reg)
{
  /* Simple accessor over the df-maintained per-register counter.  */
  return df->hard_regs_live_count[reg];
}


/* Get the value of regs_ever_live[REGNO].  */

H.J. Lu committed
4116
bool
4117 4118 4119 4120 4121 4122 4123 4124 4125
df_regs_ever_live_p (unsigned int regno)
{
  return regs_ever_live[regno];
}


/* Set regs_ever_live[REGNO] to VALUE.  If this cause regs_ever_live
   to change, schedule that change for the next update.  */

void
df_set_regs_ever_live (unsigned int regno, bool value)
{
  /* No-op when the flag already has the requested value.  */
  if (regs_ever_live[regno] == value)
    return;

  regs_ever_live[regno] = value;
  /* Defer the entry/exit recomputation; it is picked up via
     df->redo_entry_and_exit (see df_compute_regs_ever_live).  */
  if (df)
    df->redo_entry_and_exit = true;
}


/* Compute "regs_ever_live" information from the underlying df
   information.  Set the vector to all false if RESET.  */

void
df_compute_regs_ever_live (bool reset)
{
  unsigned int i;
  bool changed = df->redo_entry_and_exit;

  if (reset)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  /* Turn on any bit whose hard reg the df info says is actually used;
     never turn bits off here (reset handles that).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
      {
	regs_ever_live[i] = true;
	changed = true;
      }
  if (changed)
    df_update_entry_exit_and_calls ();
  df->redo_entry_and_exit = false;
}


/*----------------------------------------------------------------------------
  Dataflow ref information verification functions.

  df_reg_chain_mark (refs, regno, is_def, is_eq_use)
  df_reg_chain_verify_unmarked (refs)
  df_refs_verify (VEC(stack,df_ref)*, ref*, bool)
  df_mws_verify (mw*, mw*, bool)
  df_insn_refs_verify (collection_rec, bb, insn, bool)
  df_bb_refs_verify (bb, refs, bool)
  df_bb_verify (bb)
  df_exit_block_bitmap_verify (bool)
  df_entry_block_bitmap_verify (bool)
  df_scan_verify ()
----------------------------------------------------------------------------*/


/* Mark all refs in the reg chain.  Verify that all of the registers
H.J. Lu committed
4179
are in the correct chain.  */
4180 4181

static unsigned int
H.J. Lu committed
4182
df_reg_chain_mark (df_ref refs, unsigned int regno,
4183 4184 4185
		   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
4186
  df_ref ref;
4187 4188 4189 4190 4191 4192 4193 4194 4195 4196 4197 4198
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
	 of the chains are clear.  */
      if (!df_chain)
	gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
4199
	gcc_assert (DF_REF_REG_DEF_P (ref));
4200
      else
4201
	gcc_assert (!DF_REF_REG_DEF_P (ref));
4202 4203 4204 4205 4206 4207

      if (is_eq_use)
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

4208 4209
      if (DF_REF_NEXT_REG (ref))
	gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4210 4211 4212 4213 4214 4215 4216
      count++;
      DF_REF_REG_MARK (ref);
    }
  return count;
}


H.J. Lu committed
4217
/* Verify that all of the registers in the chain are unmarked.  */
4218 4219

static void
4220
df_reg_chain_verify_unmarked (df_ref refs)
4221
{
4222
  df_ref ref;
4223 4224 4225 4226 4227 4228 4229 4230
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    gcc_assert (!DF_REF_IS_REG_MARKED (ref));
}


/* Verify that NEW_REC and OLD_REC have exactly the same members. */

static bool
4231
df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
4232 4233
		bool abort_if_fail)
{
4234 4235 4236
  unsigned int ix;
  df_ref new_ref;

4237
  FOR_EACH_VEC_ELT (df_ref, new_rec, ix, new_ref)
4238
    {
4239
      if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
4240 4241 4242 4243 4244 4245 4246 4247 4248 4249 4250 4251 4252 4253 4254 4255 4256 4257 4258
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* Abort if fail is called from the function level verifier.  If
	 that is the context, mark this reg as being seem.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
	  DF_REF_REG_UNMARK (*old_rec);
	}

      old_rec++;
    }

  if (abort_if_fail)
4259
    gcc_assert (*old_rec == NULL);
4260
  else
4261
    return *old_rec == NULL;
4262 4263 4264 4265 4266 4267 4268
  return false;
}


/* Verify that NEW_REC and OLD_REC have exactly the same members. */

static bool
4269 4270
df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
	       struct df_mw_hardreg **old_rec,
4271 4272
	       bool abort_if_fail)
{
4273 4274 4275
  unsigned int ix;
  struct df_mw_hardreg *new_reg;

4276
  FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, new_rec, ix, new_reg)
4277
    {
4278
      if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
4279 4280 4281 4282 4283 4284 4285 4286 4287 4288
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}
      old_rec++;
    }

  if (abort_if_fail)
4289
    gcc_assert (*old_rec == NULL);
4290
  else
4291
    return *old_rec == NULL;
4292 4293 4294 4295 4296 4297
  return false;
}


/* Return true if the existing insn refs information is complete and
   correct. Otherwise (i.e. if there's any missing or extra refs),
H.J. Lu committed
4298
   return the correct df_ref chain in REFS_RETURN.
4299 4300 4301 4302 4303 4304 4305 4306 4307 4308

   If ABORT_IF_FAIL, leave the refs that are verified (already in the
   ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
   verification mode instead of the whole function, so unmark
   everything.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
H.J. Lu committed
4309
		     basic_block bb,
4310 4311 4312 4313 4314
                     rtx insn,
		     bool abort_if_fail)
{
  bool ret1, ret2, ret3, ret4;
  unsigned int uid = INSN_UID (insn);
4315
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4316

4317
  df_insn_refs_collect (collection_rec, bb, insn_info);
4318 4319 4320 4321 4322 4323

  if (!DF_INSN_UID_DEFS (uid))
    {
      /* The insn_rec was created but it was never filled out.  */
      if (abort_if_fail)
	gcc_assert (0);
H.J. Lu committed
4324
      else
4325 4326 4327 4328 4329
	return false;
    }

  /* Unfortunately we cannot opt out early if one of these is not
     right because the marks will not get cleared.  */
H.J. Lu committed
4330
  ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4331
			 abort_if_fail);
H.J. Lu committed
4332
  ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4333
			 abort_if_fail);
H.J. Lu committed
4334
  ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4335
			 abort_if_fail);
H.J. Lu committed
4336
  ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4337 4338 4339 4340 4341 4342 4343 4344 4345 4346 4347 4348 4349 4350 4351
		       abort_if_fail);
  return (ret1 && ret2 && ret3 && ret4);
}


/* Return true if all refs in the basic block are correct and complete.
   Due to df_ref_chain_verify, it will cause all refs
   that are verified to have DF_REF_MARK bit set.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from end to beginning.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
        continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  return true;
}


H.J. Lu committed
4380
/* Returns true if the entry block has correct and complete df_ref set.
4381 4382 4383 4384 4385
   If not it either aborts if ABORT_IF_FAIL is true or returns false.  */

static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
4386
  bitmap_head entry_block_defs;
4387 4388
  bool is_eq;

4389 4390
  bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
  df_get_entry_block_def_set (&entry_block_defs);
4391

4392
  is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
4393 4394 4395 4396 4397

  if (!is_eq && abort_if_fail)
    {
      print_current_pass (stderr);
      fprintf (stderr, "entry_block_defs = ");
4398
      df_print_regset (stderr, &entry_block_defs);
4399 4400 4401 4402 4403
      fprintf (stderr, "df->entry_block_defs = ");
      df_print_regset (stderr, df->entry_block_defs);
      gcc_assert (0);
    }

4404
  bitmap_clear (&entry_block_defs);
4405 4406 4407 4408 4409

  return is_eq;
}


H.J. Lu committed
4410
/* Returns true if the exit block has correct and complete df_ref set.
4411 4412 4413 4414 4415
   If not it either aborts if ABORT_IF_FAIL is true or returns false. */

static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
4416
  bitmap_head exit_block_uses;
4417 4418
  bool is_eq;

4419 4420
  bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
  df_get_exit_block_use_set (&exit_block_uses);
4421

4422
  is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
4423 4424 4425 4426 4427

  if (!is_eq && abort_if_fail)
    {
      print_current_pass (stderr);
      fprintf (stderr, "exit_block_uses = ");
4428
      df_print_regset (stderr, &exit_block_uses);
4429 4430 4431 4432 4433
      fprintf (stderr, "df->exit_block_uses = ");
      df_print_regset (stderr, df->exit_block_uses);
      gcc_assert (0);
    }

4434
  bitmap_clear (&exit_block_uses);
4435 4436 4437 4438 4439

  return is_eq;
}


4440 4441
/* Return true if df_ref information for all insns in all blocks are
   correct and complete.  */
4442 4443 4444 4445 4446 4447

void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;
4448 4449
  bitmap_head regular_block_artificial_uses;
  bitmap_head eh_block_artificial_uses;
4450 4451 4452 4453 4454 4455 4456 4457 4458

  if (!df)
    return;

  /* Verification is a 4 step process. */

  /* (1) All of the refs are marked by going thru the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
H.J. Lu committed
4459
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4460
		  == DF_REG_DEF_COUNT(i));
H.J. Lu committed
4461
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4462
		  == DF_REG_USE_COUNT(i));
H.J. Lu committed
4463
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4464 4465 4466 4467 4468 4469
		  == DF_REG_EQ_USE_COUNT(i));
    }

  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
4470 4471
  bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
  bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
4472

4473 4474
  df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
4475

4476 4477
  bitmap_ior_into (&eh_block_artificial_uses,
		   &regular_block_artificial_uses);
4478 4479

  /* Check artificial_uses bitmaps didn't change. */
4480 4481 4482 4483
  gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
			      &df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
			      &df->eh_block_artificial_uses));
4484

4485 4486
  bitmap_clear (&regular_block_artificial_uses);
  bitmap_clear (&eh_block_artificial_uses);
4487 4488 4489 4490 4491

  /* Verify entry block and exit block. These only verify the bitmaps,
     the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);
H.J. Lu committed
4492

4493 4494 4495 4496 4497 4498 4499 4500 4501 4502 4503 4504 4505 4506 4507 4508 4509 4510 4511 4512
  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB (bb)
    df_bb_verify (bb);

  /* (4) See if all reg chains are traversed a second time.  This time
     a check is made that the marks are clear. A set mark would be a
     from a reg that is not in any insn or basic block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}