flow.c 133 KB
Newer Older
Richard Kenner committed
1
/* Data flow analysis for GNU compiler.
Kazu Hirata committed
2
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
Richard Kenner committed
4

5
This file is part of GCC.
Richard Kenner committed
6

7 8 9 10
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
Richard Kenner committed
11

12 13 14 15
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
Richard Kenner committed
16 17

You should have received a copy of the GNU General Public License
18 19 20
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
Richard Kenner committed
21

22 23 24
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.
Richard Kenner committed
25

26 27 28
   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.
Richard Kenner committed
29 30 31 32 33 34 35

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

36 37 38 39
   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.
Richard Kenner committed
40

41
   find_basic_blocks also finds any unreachable loops and deletes them.
Richard Kenner committed
42 43 44 45 46 47 48 49 50 51

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
52
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
Richard Kenner committed
53

54 55 56 57
   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
Richard Kenner committed
58

Kazu Hirata committed
59
   Two types of elements can be added to an insn's REG_NOTES.
Richard Kenner committed
60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
106 107
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
John Wehle committed
108 109 110

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
111

Kazu Hirata committed
112
/* TODO:
113 114

   Split out from life_analysis:
115
	- local property discovery
116 117 118 119
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
Richard Kenner committed
120 121

#include "config.h"
122
#include "system.h"
123 124
#include "coretypes.h"
#include "tm.h"
125
#include "tree.h"
Richard Kenner committed
126
#include "rtl.h"
127
#include "tm_p.h"
128
#include "hard-reg-set.h"
Richard Kenner committed
129 130 131 132 133
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
134
#include "function.h"
Mike Stump committed
135
#include "except.h"
Graham Stott committed
136
#include "toplev.h"
Kaveh R. Ghazi committed
137
#include "recog.h"
138
#include "expr.h"
139
#include "timevar.h"
Richard Kenner committed
140 141

#include "obstack.h"
142
#include "splay-tree.h"
Jeff Law committed
143

144 145 146 147 148 149
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
150 151 152
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
153

154 155 156
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif
157 158 159
#ifndef EH_USES
#define EH_USES(REGNO)  0
#endif
160

161 162
#ifdef HAVE_conditional_execution
#ifndef REVERSE_CONDEXEC_PREDICATES_P
163 164
#define REVERSE_CONDEXEC_PREDICATES_P(x, y) \
  (GET_CODE ((x)) == reversed_comparison_code ((y), NULL))
165 166 167
#endif
#endif

168 169 170 171 172
/* This is the maximum number of times we process any given block if the
   latest loop depth count is smaller than this number.  Only used for the
   failure strategy to avoid infinite loops in calculate_global_regs_live.  */
#define MAX_LIVENESS_ROUNDS 20

173 174 175
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

Richard Kenner committed
176 177 178 179
/* Maximum register number used in this function, plus one.  */

int max_regno;

180
/* Indexed by n, giving various register information */
Richard Kenner committed
181

182
varray_type reg_n_info;
Richard Kenner committed
183 184

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
185
/* ??? Does this exist only for the setjmp-clobbered warning message?  */
Richard Kenner committed
186

187
static regset regs_live_at_setjmp;
Richard Kenner committed
188 189 190 191 192 193 194 195 196 197 198 199

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

200 201 202
/* Holds information for tracking conditional register life information.
   One of these exists (in a splay tree keyed by regno) for each register
   that is neither unconditionally live nor unconditionally dead within
   the block being scanned.  */
struct reg_cond_life_info
{
  /* A boolean expression of conditions under which a register is dead.  */
  rtx condition;
  /* Conditions under which a register is dead at the basic block end.  */
  rtx orig_condition;

  /* A boolean expression of conditions under which a register has been
     stored into.  */
  rtx stores;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};

216 217 218 219 220 221 222 223 224 225 226
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list.  Bounded by MAX_MEM_SET_LIST_LEN.  */
  int mem_set_list_len;

  /* Nonzero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
  /* Index of instruction being processed.  */
  int insn_num;
};

267 268 269
/* Number of dead insns removed.  */
static int ndead;

270 271 272 273 274 275
/* When PROP_REG_INFO is set, this array contains pbi->insn_num of the
   instruction at which the given register died.  When the register is later
   marked alive (remember, we are walking backward), we use this information
   to compute how many instructions its live range crosses, as current
   pbi->insn_num - reg_deaths[regno].
   At the end of processing each basic block, the remaining live registers
   are inspected and their live ranges are increased the same way, so the
   live ranges of global registers are computed correctly.

   The array is kept clear for dead registers, so it can be safely reused
   for the next basic block without an expensive memset of the whole array
   after resetting pbi->insn_num to 0.  */

static int *reg_deaths;

285 286 287 288
/* Maximum length of pbi->mem_set_list before we start dropping
   new elements on the floor.  */
#define MAX_MEM_SET_LIST_LEN	100

Richard Kenner committed
289
/* Forward declarations */
290 291 292 293
static int verify_wide_reg_1 (rtx *, void *);
static void verify_wide_reg (int, basic_block);
static void verify_local_live_at_start (regset, basic_block);
static void notice_stack_pointer_modification_1 (rtx, rtx, void *);
294
static void notice_stack_pointer_modification (void);
295 296 297 298 299 300 301 302 303 304 305
static void mark_reg (rtx, void *);
static void mark_regs_live_at_end (regset);
static void calculate_global_regs_live (sbitmap, sbitmap, int);
static void propagate_block_delete_insn (rtx);
static rtx propagate_block_delete_libcall (rtx, rtx);
static int insn_dead_p (struct propagate_block_info *, rtx, int, rtx);
static int libcall_dead_p (struct propagate_block_info *, rtx, rtx);
static void mark_set_regs (struct propagate_block_info *, rtx, rtx);
static void mark_set_1 (struct propagate_block_info *, enum rtx_code, rtx,
			rtx, rtx, int);
static int find_regno_partial (rtx *, void *);
306

307
#ifdef HAVE_conditional_execution
308 309 310 311 312 313 314 315
static int mark_regno_cond_dead (struct propagate_block_info *, int, rtx);
static void free_reg_cond_life_info (splay_tree_value);
static int flush_reg_cond_reg_1 (splay_tree_node, void *);
static void flush_reg_cond_reg (struct propagate_block_info *, int);
static rtx elim_reg_cond (rtx, unsigned int);
static rtx ior_reg_cond (rtx, rtx, int);
static rtx not_reg_cond (rtx);
static rtx and_reg_cond (rtx, rtx, int);
316
#endif
317
#ifdef AUTO_INC_DEC
318 319 320 321 322
static void attempt_auto_inc (struct propagate_block_info *, rtx, rtx, rtx,
			      rtx, rtx);
static void find_auto_inc (struct propagate_block_info *, rtx, rtx);
static int try_pre_increment_1 (struct propagate_block_info *, rtx);
static int try_pre_increment (rtx, rtx, HOST_WIDE_INT);
323
#endif
324 325 326 327 328 329 330
static void mark_used_reg (struct propagate_block_info *, rtx, rtx, rtx);
static void mark_used_regs (struct propagate_block_info *, rtx, rtx, rtx);
void debug_flow_info (void);
static void add_to_mem_set_list (struct propagate_block_info *, rtx);
static int invalidate_mems_from_autoinc (rtx *, void *);
static void invalidate_mems_from_set (struct propagate_block_info *, rtx);
static void clear_log_links (sbitmap);
331
static int count_or_remove_death_notes_bb (basic_block, int);
332
static void allocate_bb_life_data (void);
Richard Kenner committed
333

334 335 336 337
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

rtx
338
first_insn_after_basic_block_note (basic_block block)
339 340
{
  rtx insn;
341

342
  /* Get the first instruction in the block.  */
343
  insn = BB_HEAD (block);
344

345 346
  if (insn == NULL_RTX)
    return NULL_RTX;
347
  if (LABEL_P (insn))
348
    insn = NEXT_INSN (insn);
349
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
350 351 352 353

  return NEXT_INSN (insn);
}

354 355
/* Perform data flow analysis for the whole control flow graph.
   FLAGS is a set of PROP_* flags to be used in accumulating flow info.
   Results are recorded in the regsets hanging off each basic block and
   in the per-register vectors (REG_N_REFS etc.) when PROP_REG_INFO is
   requested.  */

void
life_analysis (FILE *file, int flags)
{
#ifdef ELIMINABLE_REGS
  int i;
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif


#ifdef CANNOT_CHANGE_MODE_CLASS
  if (flags & PROP_REG_INFO)
    init_subregs_of_mode ();
#endif

  /* Without optimization the more expensive properties are pointless.  */
  if (! optimize)
    flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);

  /* The post-reload life analysis must have (on a global basis) the same
     registers live as was computed by reload itself; otherwise elimination
     offsets and such may be incorrect.

     Reload will mark some registers as live even though they do not
     appear in the rtl.

     We don't want to create new auto-incs after reload, since they
     are unlikely to be useful and can cause problems with shared
     stack slots.  */
  if (reload_completed)
    flags &= ~(PROP_REG_INFO | PROP_AUTOINC);

  /* We want alias analysis information for local dead store elimination.  */
  if (optimize && (flags & PROP_SCAN_DEAD_STORES))
    init_alias_analysis ();

  /* Always remove no-op moves.  Do this before other processing so
     that we don't have to keep re-scanning them.  */
  delete_noop_moves ();

  /* Some targets can emit simpler epilogues if they know that sp was
     not ever modified during the function.  After reload, of course,
     we've already emitted the epilogue so there's no sense searching.  */
  if (! reload_completed)
    notice_stack_pointer_modification ();

  /* Allocate and zero out data structures that will record the
     data from lifetime analysis.  */
  allocate_reg_life_data ();
  allocate_bb_life_data ();

  /* Find the set of registers live on function exit.  */
  mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);

  /* "Update" life info from zero.  It'd be nice to begin the
     relaxation with just the exit and noreturn blocks, but that set
     is not immediately handy.  */

  if (flags & PROP_REG_INFO)
    {
      /* Start regs_ever_live from scratch; update_life_info will
	 repopulate it (see the comment on update_life_info).  */
      memset (regs_ever_live, 0, sizeof (regs_ever_live));
      memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
    }
  update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
  if (reg_deaths)
    {
      free (reg_deaths);
      reg_deaths = NULL;
    }

  /* Clean up.  */
  if (optimize && (flags & PROP_SCAN_DEAD_STORES))
    end_alias_analysis ();

  if (file)
    dump_flow_info (file);

  /* Removing dead insns should have made jumptables really dead.  */
  delete_dead_jumptables ();
}
447

448
/* A subroutine of verify_wide_reg, called through for_each_rtx.
449 450
   Search for REGNO.  If found, return 2 if it is not wider than
   word_mode.  */
451

452
static int
453
verify_wide_reg_1 (rtx *px, void *pregno)
454 455 456
{
  rtx x = *px;
  unsigned int regno = *(int *) pregno;
457

458
  if (REG_P (x) && REGNO (x) == regno)
459
    {
460
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
461
	return 2;
462
      return 1;
463
    }
464
  return 0;
465 466
}

467
/* A subroutine of verify_local_live_at_start.  Search through insns
468
   of BB looking for register REGNO.  */
469

470
static void
471
verify_wide_reg (int regno, basic_block bb)
472
{
473
  rtx head = BB_HEAD (bb), end = BB_END (bb);
474

475
  while (1)
476
    {
477 478 479 480 481 482 483 484
      if (INSN_P (head))
	{
	  int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
	  if (r == 1)
	    return;
	  if (r == 2)
	    break;
	}
485 486 487 488
      if (head == end)
	break;
      head = NEXT_INSN (head);
    }
489
  if (dump_file)
490
    {
491 492
      fprintf (dump_file, "Register %d died unexpectedly.\n", regno);
      dump_bb (bb, dump_file, 0);
493
    }
494
  fatal_error ("internal consistency failure");
495
}
496

497 498
/* A subroutine of update_life_info.  Verify that there are no untoward
   changes in live_at_start during a local update.

   NEW_LIVE_AT_START is the recomputed live set for BB; it is compared
   against the recorded bb->global_live_at_start.  Note that in the
   !reload_completed branch NEW_LIVE_AT_START is destructively XORed,
   which is fine since callers discard it afterwards.  */

static void
verify_local_live_at_start (regset new_live_at_start, basic_block bb)
{
  if (reload_completed)
    {
      /* After reload, there are no pseudos, nor subregs of multi-word
	 registers.  The regsets should exactly match.  */
      if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file,
		       "live_at_start mismatch in bb %d, aborting\nNew:\n",
		       bb->index);
	      debug_bitmap_file (dump_file, new_live_at_start);
	      fputs ("Old:\n", dump_file);
	      dump_bb (bb, dump_file, 0);
	    }
	  fatal_error ("internal consistency failure");
	}
    }
  else
    {
      unsigned i;
      reg_set_iterator rsi;

      /* Find the set of changed registers.  */
      XOR_REG_SET (new_live_at_start, bb->global_live_at_start);

      EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i, rsi)
	{
	  /* No registers should die.  */
	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file,
			   "Register %d died unexpectedly.\n", i);
		  dump_bb (bb, dump_file, 0);
		}
	      fatal_error ("internal consistency failure");
	    }
	  /* Verify that the now-live register is wider than word_mode.  */
	  verify_wide_reg (i, bb);
	}
    }
}
Richard Kenner committed
547

548 549
/* Updates life information starting with the basic blocks set in BLOCKS.
   If BLOCKS is null, consider it to be the universal set.

   If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
   we are only expecting local modifications to basic blocks.  If we find
   extra registers live at the beginning of a block, then we either killed
   useful data, or we have a broken split that wants data not provided.
   If we find registers removed from live_at_start, that means we have
   a broken peephole that is killing a register it shouldn't.

   ??? This is not true in one situation -- when a pre-reload splitter
   generates subregs of a multi-word pseudo, current life analysis will
   lose the kill.  So we _can_ have a pseudo go live.  How irritating.

   It is also not true when a peephole decides that it doesn't need one
   or more of the inputs.

   Including PROP_REG_INFO does not properly refresh regs_ever_live
   unless the caller resets it to zero.

   Returns the number of dead insns deleted (ndead).  */

int
update_life_info (sbitmap blocks, enum update_life_extent extent,
		  int prop_flags)
{
  regset tmp;
  unsigned i;
  int stabilized_prop_flags = prop_flags;
  basic_block bb;

  tmp = ALLOC_REG_SET (&reg_obstack);
  ndead = 0;

  /* Lazily allocate the death-position array used for live-length
     bookkeeping; see the comment on reg_deaths.  */
  if ((prop_flags & PROP_REG_INFO) && !reg_deaths)
    reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);

  timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
		? TV_LIFE_UPDATE : TV_LIFE);

  /* Changes to the CFG are only allowed when
     doing a global update for the entire CFG.  */
  gcc_assert (!(prop_flags & PROP_ALLOW_CFG_CHANGES)
	      || (extent != UPDATE_LIFE_LOCAL && !blocks));

  /* For a global update, we go through the relaxation process again.  */
  if (extent != UPDATE_LIFE_LOCAL)
    {
      for ( ; ; )
	{
	  int changed = 0;

	  calculate_global_regs_live (blocks, blocks,
				prop_flags & (PROP_SCAN_DEAD_CODE
					      | PROP_SCAN_DEAD_STORES
					      | PROP_ALLOW_CFG_CHANGES));

	  /* Iterate only when we both kill dead code and may change
	     the CFG; otherwise one relaxation pass suffices.  */
	  if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
	      != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
	    break;

	  /* Removing dead code may allow the CFG to be simplified which
	     in turn may allow for further dead code detection / removal.  */
	  FOR_EACH_BB_REVERSE (bb)
	    {
	      COPY_REG_SET (tmp, bb->global_live_at_end);
	      changed |= propagate_block (bb, tmp, NULL, NULL,
				prop_flags & (PROP_SCAN_DEAD_CODE
					      | PROP_SCAN_DEAD_STORES
					      | PROP_KILL_DEAD_CODE));
	    }

	  /* Don't pass PROP_SCAN_DEAD_CODE or PROP_KILL_DEAD_CODE to
	     subsequent propagate_block calls, since removing or acting as
	     removing dead code can affect global register liveness, which
	     is supposed to be finalized for this call after this loop.  */
	  stabilized_prop_flags
	    &= ~(PROP_SCAN_DEAD_CODE | PROP_SCAN_DEAD_STORES
		 | PROP_KILL_DEAD_CODE);

	  if (! changed)
	    break;

	  /* We repeat regardless of what cleanup_cfg says.  If there were
	     instructions deleted above, that might have been only a
	     partial improvement (see MAX_MEM_SET_LIST_LEN usage).
	     Further improvement may be possible.  */
	  cleanup_cfg (CLEANUP_EXPENSIVE);

	  /* Zap the life information from the last round.  If we don't
	     do this, we can wind up with registers that no longer appear
	     in the code being marked live at entry.  */
	  FOR_EACH_BB (bb)
	    {
	      CLEAR_REG_SET (bb->global_live_at_start);
	      CLEAR_REG_SET (bb->global_live_at_end);
	    }
	}

      /* If asked, remove notes from the blocks we'll update.  */
      if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
	count_or_remove_death_notes (blocks, 1);
    }

  /* Clear log links in case we are asked to (re)compute them.  */
  if (prop_flags & PROP_LOG_LINKS)
    clear_log_links (blocks);

  /* Now do the final local propagation over the requested blocks, using
     the stabilized flags computed above.  */
  if (blocks)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
	{
	  bb = BASIC_BLOCK (i);

	  COPY_REG_SET (tmp, bb->global_live_at_end);
	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	});
    }
  else
    {
      FOR_EACH_BB_REVERSE (bb)
	{
	  COPY_REG_SET (tmp, bb->global_live_at_end);

	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	}
    }

  FREE_REG_SET (tmp);

  if (prop_flags & PROP_REG_INFO)
    {
      reg_set_iterator rsi;

      /* The only pseudos that are live at the beginning of the function
	 are those that were not set anywhere in the function.  local-alloc
	 doesn't know how to handle these correctly, so mark them as not
	 local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
				 FIRST_PSEUDO_REGISTER, i, rsi)
	REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;

      /* We have a problem with any pseudoreg that lives across the setjmp.
	 ANSI says that if a user variable does not change in value between
	 the setjmp and the longjmp, then the longjmp preserves it.  This
	 includes longjmp from a place where the pseudo appears dead.
	 (In principle, the value still exists if it is in scope.)
	 If the pseudo goes in a hard reg, some other value may occupy
	 that hard reg where this pseudo is dead, thus clobbering the pseudo.
	 Conclusion: such a pseudo must not go in a hard reg.  */
      EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
				 FIRST_PSEUDO_REGISTER, i, rsi)
	{
	  if (regno_reg_rtx[i] != 0)
	    {
	      REG_LIVE_LENGTH (i) = -1;
	      REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
	    }
	}
    }
  if (reg_deaths)
    {
      free (reg_deaths);
      reg_deaths = NULL;
    }
  timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
	       ? TV_LIFE_UPDATE : TV_LIFE);
  if (ndead && dump_file)
    fprintf (dump_file, "deleted %i dead insns\n", ndead);
  return ndead;
}
723

724 725
/* Update life information in all blocks where BB_DIRTY is set.  */

726
int
727
update_life_info_in_dirty_blocks (enum update_life_extent extent, int prop_flags)
728
{
729
  sbitmap update_life_blocks = sbitmap_alloc (last_basic_block);
730
  int n = 0;
731
  basic_block bb;
732
  int retval = 0;
733 734

  sbitmap_zero (update_life_blocks);
735
  FOR_EACH_BB (bb)
736
    {
737
      if (bb->flags & BB_DIRTY)
738 739
	{
	  SET_BIT (update_life_blocks, bb->index);
740
	  n++;
741 742
	}
    }
743 744

  if (n)
745
    retval = update_life_info (update_life_blocks, extent, prop_flags);
746 747

  sbitmap_free (update_life_blocks);
748
  return retval;
749 750
}

751
/* Free the variables allocated by find_basic_blocks.  */
752

753
void
754
free_basic_block_vars (void)
755
{
756
  if (basic_block_info)
757
    {
758
      clear_edges ();
759
      basic_block_info = NULL;
760
    }
761 762
  n_basic_blocks = 0;
  last_basic_block = 0;
763 764 765
  n_edges = 0;

  label_to_block_map = NULL;
766 767 768 769 770

  ENTRY_BLOCK_PTR->aux = NULL;
  ENTRY_BLOCK_PTR->global_live_at_end = NULL;
  EXIT_BLOCK_PTR->aux = NULL;
  EXIT_BLOCK_PTR->global_live_at_start = NULL;
771 772
}

773
/* Delete any insns that copy a register to itself.
   Returns the number of no-op moves deleted.  */

int
delete_noop_moves (void)
{
  rtx insn, next;
  basic_block bb;
  int nnoops = 0;

  FOR_EACH_BB (bb)
    {
      /* Cache NEXT_INSN up front because deleting INSN invalidates it.  */
      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (INSN_P (insn) && noop_move_p (insn))
	    {
	      rtx note;

	      /* If we're about to remove the first insn of a libcall
		 then move the libcall note to the next real insn and
		 update the retval note.  */
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
		       && XEXP (note, 0) != insn)
		{
		  rtx new_libcall_insn = next_real_insn (insn);
		  rtx retval_note = find_reg_note (XEXP (note, 0),
						   REG_RETVAL, NULL_RTX);
		  /* Attach the REG_LIBCALL note to the new first insn of
		     the libcall sequence, and point the matching
		     REG_RETVAL note back at it.  */
		  REG_NOTES (new_libcall_insn)
		    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
					 REG_NOTES (new_libcall_insn));
		  XEXP (retval_note, 0) = new_libcall_insn;
		}

	      delete_insn_and_edges (insn);
	      nnoops++;
	    }
	}
    }
  if (nnoops && dump_file)
    fprintf (dump_file, "deleted %i noop moves", nnoops);
  return nnoops;
}

816
/* Delete any jump tables never referenced.  We can't delete them at the
   time of removing tablejump insn as they are referenced by the preceding
   insns computing the destination, so we delay deleting and garbagecollect
   them once life information is computed.  */
void
delete_dead_jumptables (void)
{
  basic_block bb;

  /* A dead jump table does not belong to any basic block.  Scan insns
     between two adjacent basic blocks.  */
  FOR_EACH_BB (bb)
    {
      rtx insn, next;

      /* Walk the insns after BB's end up to the next basic block note.  */
      for (insn = NEXT_INSN (BB_END (bb));
	   insn && !NOTE_INSN_BASIC_BLOCK_P (insn);
	   insn = next)
	{
	  next = NEXT_INSN (insn);
	  /* A dead jump table is a label whose only uses are "preserve"
	     references, immediately followed by an ADDR_VEC or
	     ADDR_DIFF_VEC jump table.  */
	  if (LABEL_P (insn)
	      && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
	      && JUMP_P (next)
	      && (GET_CODE (PATTERN (next)) == ADDR_VEC
		  || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
	    {
	      rtx label = insn, jump = next;

	      if (dump_file)
		fprintf (dump_file, "Dead jumptable %i removed\n",
			 INSN_UID (insn));

	      /* Advance NEXT past the table before deleting, so the
		 loop continues from a still-valid insn.  */
	      next = NEXT_INSN (next);
	      delete_insn (jump);
	      delete_insn (label);
	    }
	}
    }
}

856 857
/* Determine if the stack pointer is constant over the life of the function.
   Only useful before prologues have been emitted.  */
858 859

static void
860 861
notice_stack_pointer_modification_1 (rtx x, rtx pat ATTRIBUTE_UNUSED,
				     void *data ATTRIBUTE_UNUSED)
862
{
863 864 865 866
  if (x == stack_pointer_rtx
      /* The stack pointer is only modified indirectly as the result
	 of a push until later in flow.  See the comments in rtl.texi
	 regarding Embedded Side-Effects on Addresses.  */
867
      || (MEM_P (x)
868
	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == RTX_AUTOINC
869 870
	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
    current_function_sp_is_unchanging = 0;
871
}
Andrew MacLeod committed
872

873
static void
874
notice_stack_pointer_modification (void)
875
{
876
  basic_block bb;
877
  rtx insn;
878

879 880 881 882 883
  /* Assume that the stack pointer is unchanging if alloca hasn't
     been used.  */
  current_function_sp_is_unchanging = !current_function_calls_alloca;
  if (! current_function_sp_is_unchanging)
    return;
884

885 886 887 888 889 890 891 892 893 894 895 896 897
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  {
	    /* Check if insn modifies the stack pointer.  */
	    note_stores (PATTERN (insn),
			 notice_stack_pointer_modification_1,
			 NULL);
	    if (! current_function_sp_is_unchanging)
	      return;
	  }
      }
898
}
899

900 901
/* Mark a register in SET.  Hard registers in large modes get all
   of their component registers set as well.  */
902

903
static void
904
mark_reg (rtx reg, void *xset)
905
{
906 907
  regset set = (regset) xset;
  int regno = REGNO (reg);
908

909
  gcc_assert (GET_MODE (reg) != BLKmode);
910

911 912
  SET_REGNO_REG_SET (set, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
913
    {
914
      int n = hard_regno_nregs[regno][GET_MODE (reg)];
915 916
      while (--n > 0)
	SET_REGNO_REG_SET (set, regno + n);
917 918
    }
}
919

920 921
/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block.  SET is the regset to fill in;
   bits are only ever added, never cleared.  */

static void
mark_regs_live_at_end (regset set)
{
  unsigned int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_REGNO_REG_SET (set, i);

  /* Once the epilogue has been emitted, the registers it restores are
     referenced by real insns and must be seen as live there.  */
  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (regs_ever_live[i] && ! LOCAL_REGNO (i)
	    && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  SET_REGNO_REG_SET (set, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.
     EH_RETURN_DATA_REGNO enumerates them until it returns
     INVALID_REGNUM.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO(i);
	if (regno == INVALID_REGNUM)
	  break;
	SET_REGNO_REG_SET (set, regno);
      }
#endif
#ifdef EH_RETURN_STACKADJ_RTX
  /* While the epilogue has not yet consumed it, the eh-return stack
     adjustment must stay live (when the target keeps it in a REG).  */
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	mark_reg (tmp, set);
    }
#endif
#ifdef EH_RETURN_HANDLER_RTX
  /* Likewise for the register holding the eh-return handler address.  */
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	mark_reg (tmp, set);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (mark_reg, set);
}

1013 1014 1015
/* Propagate global life info around the graph of basic blocks.  Begin
   considering blocks with their corresponding bit set in BLOCKS_IN.
   If BLOCKS_IN is null, consider it the universal set.

   BLOCKS_OUT is set for every block that was changed.  FLAGS is the
   set of PROP_* flags handed on to propagate_block when a block must
   be rescanned.  */

static void
calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags)
{
  basic_block *queue, *qhead, *qtail, *qend, bb;
  regset tmp, new_live_at_end, invalidated_by_call;
  regset registers_made_dead;
  bool failure_strategy_required = false;
  int *block_accesses;

  /* The registers that are modified within this in block.  */
  regset *local_sets;

  /* The registers that are conditionally modified within this block.
     In other words, regs that are set only as part of a COND_EXEC.  */
  regset *cond_local_sets;

  int i;

  /* Some passes used to forget clear aux field of basic block causing
     sick behavior here.  */
#ifdef ENABLE_CHECKING
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    gcc_assert (!bb->aux);
#endif

  tmp = ALLOC_REG_SET (&reg_obstack);
  new_live_at_end = ALLOC_REG_SET (&reg_obstack);
  invalidated_by_call = ALLOC_REG_SET (&reg_obstack);
  registers_made_dead = ALLOC_REG_SET (&reg_obstack);

  /* Inconveniently, this is only readily available in hard reg set form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      SET_REGNO_REG_SET (invalidated_by_call, i);

  /* Allocate space for the sets of local properties.  Indexed by
     bb->index - (INVALID_BLOCK + 1) so ENTRY/EXIT fit too.  */
  local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
			sizeof (regset));
  cond_local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
			     sizeof (regset));

  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
     because the `head == tail' style test for an empty queue doesn't
     work with a full queue.  The queue is circular: qhead and qtail
     wrap around at qend.  */
  queue = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1)) * sizeof (*queue));
  qtail = queue;
  qhead = qend = queue + n_basic_blocks - (INVALID_BLOCK + 1);

  /* Queue the blocks set in the initial mask.  Do this in reverse block
     number order so that we are more likely for the first round to do
     useful work.  We use AUX non-null to flag that the block is queued.  */
  if (blocks_in)
    {
      FOR_EACH_BB (bb)
	if (TEST_BIT (blocks_in, bb->index))
	  {
	    *--qhead = bb;
	    bb->aux = bb;
	  }
    }
  else
    {
      FOR_EACH_BB (bb)
	{
	  *--qhead = bb;
	  bb->aux = bb;
	}
    }

  /* Per-block visit counter, used to detect oscillation (see the big
     comment below).  */
  block_accesses = xcalloc (last_basic_block, sizeof (int));

  /* We clean aux when we remove the initially-enqueued bbs, but we
     don't enqueue ENTRY and EXIT initially, so clean them upfront and
     unconditionally.  */
  ENTRY_BLOCK_PTR->aux = EXIT_BLOCK_PTR->aux = NULL;

  if (blocks_out)
    sbitmap_zero (blocks_out);

  /* We work through the queue until there are no more blocks.  What
     is live at the end of this block is precisely the union of what
     is live at the beginning of all its successors.  So, we set its
     GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
     for its successors.  Then, we compute GLOBAL_LIVE_AT_START for
     this block by walking through the instructions in this block in
     reverse order and updating as we go.  If that changed
     GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
     queue; they will now need to recalculate GLOBAL_LIVE_AT_END.

     We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
     never shrinks.  If a register appears in GLOBAL_LIVE_AT_START, it
     must either be live at the end of the block, or used within the
     block.  In the latter case, it will certainly never disappear
     from GLOBAL_LIVE_AT_START.  In the former case, the register
     could go away only if it disappeared from GLOBAL_LIVE_AT_START
     for one of the successor blocks.  By induction, that cannot
     occur.

     ??? This reasoning doesn't work if we start from non-empty initial
     GLOBAL_LIVE_AT_START sets.  And there are actually two problems:
       1) Updating may not terminate (endless oscillation).
       2) Even if it does (and it usually does), the resulting information
	  may be inaccurate.  Consider for example the following case:

	  a = ...;
	  while (...) {...}  -- 'a' not mentioned at all
	  ... = a;

	  If the use of 'a' is deleted between two calculations of liveness
	  information and the initial sets are not cleared, the information
	  about a's liveness will get stuck inside the loop and the set will
	  appear not to be dead.

     We do not attempt to solve 2) -- the information is conservatively
     correct (i.e. we never claim that something live is dead) and the
     amount of optimization opportunities missed due to this problem is
     not significant.

     1) is more serious.  In order to fix it, we monitor the number of times
     each block is processed.  Once one of the blocks has been processed more
     times than the maximum number of rounds, we use the following strategy:
     When a register disappears from one of the sets, we add it to a MAKE_DEAD
     set, remove all registers in this set from all GLOBAL_LIVE_AT_* sets and
     add the blocks with changed sets into the queue.  Thus we are guaranteed
     to terminate (the worst case corresponds to all registers in MADE_DEAD,
     in which case the original reasoning above is valid), but in general we
     only fix up a few offending registers.

     The maximum number of rounds for computing liveness is the largest of
     MAX_LIVENESS_ROUNDS and the latest loop depth count for this function.  */

  while (qhead != qtail)
    {
      int rescan, changed;
      basic_block bb;
      edge e;
      edge_iterator ei;

      /* Dequeue the next block; wrap the circular queue if needed.  */
      bb = *qhead++;
      if (qhead == qend)
	qhead = queue;
      bb->aux = NULL;

      /* Should we start using the failure strategy?  */
      if (bb != ENTRY_BLOCK_PTR)
	{
	  int max_liveness_rounds =
	    MAX (MAX_LIVENESS_ROUNDS, cfun->max_loop_depth);

	  block_accesses[bb->index]++;
	  if (block_accesses[bb->index] > max_liveness_rounds)
	    failure_strategy_required = true;
	}

      /* Begin by propagating live_at_start from the successor blocks.  */
      CLEAR_REG_SET (new_live_at_end);

      if (EDGE_COUNT (bb->succs) > 0)
	FOR_EACH_EDGE (e, ei, bb->succs)
	  {
	    basic_block sb = e->dest;

	    /* Call-clobbered registers die across exception and
	       call edges.  */
	    /* ??? Abnormal call edges ignored for the moment, as this gets
	       confused by sibling call edges, which crashes reg-stack.  */
	    if (e->flags & EDGE_EH)
	      bitmap_ior_and_compl_into (new_live_at_end,
					 sb->global_live_at_start,
					 invalidated_by_call);
	    else
	      IOR_REG_SET (new_live_at_end, sb->global_live_at_start);

	    /* If a target saves one register in another (instead of on
	       the stack) the save register will need to be live for EH.  */
	    if (e->flags & EDGE_EH)
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (EH_USES (i))
		  SET_REGNO_REG_SET (new_live_at_end, i);
	  }
      else
	{
	  /* This might be a noreturn function that throws.  And
	     even if it isn't, getting the unwind info right helps
	     debugging.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (EH_USES (i))
	      SET_REGNO_REG_SET (new_live_at_end, i);
	}

      /* The all-important stack pointer must always be live.  */
      SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);

      /* Before reload, there are a few registers that must be forced
	 live everywhere -- which might not already be the case for
	 blocks within infinite loops.  */
      if (! reload_completed)
	{
	  /* Any reference to any pseudo before reload is a potential
	     reference of the frame pointer.  */
	  SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  /* Pseudos with argument area equivalences may require
	     reloading via the argument pointer.  */
	  if (fixed_regs[ARG_POINTER_REGNUM])
	    SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
#endif

	  /* Any constant, or pseudo with constant equivalences, may
	     require reloading from memory using the pic register.  */
	  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	    SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
	}

      /* ENTRY_BLOCK has no insns; just record its live_at_end.  */
      if (bb == ENTRY_BLOCK_PTR)
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
	  continue;
	}

      /* On our first pass through this block, we'll go ahead and continue.
	 Recognize first pass by checking if local_set is NULL for this
         basic block.  On subsequent passes, we get to skip out early if
	 live_at_end wouldn't have changed.  */

      if (local_sets[bb->index - (INVALID_BLOCK + 1)] == NULL)
	{
	  local_sets[bb->index - (INVALID_BLOCK + 1)]
	    = ALLOC_REG_SET (&reg_obstack);
	  cond_local_sets[bb->index - (INVALID_BLOCK + 1)]
	    = ALLOC_REG_SET (&reg_obstack);
	  rescan = 1;
	}
      else
	{
	  /* If any bits were removed from live_at_end, we'll have to
	     rescan the block.  This wouldn't be necessary if we had
	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
	     local_live is really dependent on live_at_end.  */
	  rescan = bitmap_intersect_compl_p (bb->global_live_at_end,
					     new_live_at_end);

	  if (!rescan)
	    {
	      regset cond_local_set;

	       /* If any of the registers in the new live_at_end set are
		  conditionally set in this basic block, we must rescan.
		  This is because conditional lifetimes at the end of the
		  block do not just take the live_at_end set into
		  account, but also the liveness at the start of each
		  successor block.  We can miss changes in those sets if
		  we only compare the new live_at_end against the
		  previous one.  */
	      cond_local_set = cond_local_sets[bb->index - (INVALID_BLOCK + 1)];
	      rescan = bitmap_intersect_p (new_live_at_end, cond_local_set);
	    }

	  if (!rescan)
	    {
	      regset local_set;

	      /* Find the set of changed bits.  Take this opportunity
		 to notice that this set is empty and early out.  */
	      bitmap_xor (tmp, bb->global_live_at_end, new_live_at_end);
	      if (bitmap_empty_p (tmp))
		continue;

	      /* If any of the changed bits overlap with local_sets[bb],
 		 we'll have to rescan the block.  */
	      local_set = local_sets[bb->index - (INVALID_BLOCK + 1)];
	      rescan = bitmap_intersect_p (tmp, local_set);
	    }
	}

      /* Let our caller know that BB changed enough to require its
	 death notes updated.  */
      if (blocks_out)
	SET_BIT (blocks_out, bb->index);

      if (! rescan)
	{
	  /* Add to live_at_start the set of all registers in
	     new_live_at_end that aren't in the old live_at_end.  */

	  changed = bitmap_ior_and_compl_into (bb->global_live_at_start,
					       new_live_at_end,
					       bb->global_live_at_end);
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
	  if (! changed)
	    continue;
	}
      else
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
	     into live_at_start.  */
	  propagate_block (bb, new_live_at_end,
			   local_sets[bb->index - (INVALID_BLOCK + 1)],
			   cond_local_sets[bb->index - (INVALID_BLOCK + 1)],
			   flags);

	  /* If live_at start didn't change, no need to go farther.  */
	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
	    continue;

	  if (failure_strategy_required)
	    {
	      /* Get the list of registers that were removed from the
	         bb->global_live_at_start set.  */
	      bitmap_and_compl (tmp, bb->global_live_at_start,
				new_live_at_end);
	      if (!bitmap_empty_p (tmp))
		{
		  bool pbb_changed;
		  basic_block pbb;

		  /* It should not happen that one of the registers we
		     removed last time disappears again before any other
		     register does.  */
		  pbb_changed = bitmap_ior_into (registers_made_dead, tmp);
		  gcc_assert (pbb_changed);

		  /* Now remove the registers from all sets.  */
		  FOR_EACH_BB (pbb)
		    {
		      pbb_changed = false;

		      pbb_changed
			|= bitmap_and_compl_into (pbb->global_live_at_start,
						  registers_made_dead);
		      pbb_changed
			|= bitmap_and_compl_into (pbb->global_live_at_end,
						  registers_made_dead);
		      if (!pbb_changed)
			continue;

		      /* Note the (possible) change.  */
		      if (blocks_out)
			SET_BIT (blocks_out, pbb->index);

		      /* Makes sure to really rescan the block.  */
		      if (local_sets[pbb->index - (INVALID_BLOCK + 1)])
			{
			  FREE_REG_SET (local_sets[pbb->index - (INVALID_BLOCK + 1)]);
			  FREE_REG_SET (cond_local_sets[pbb->index - (INVALID_BLOCK + 1)]);
			  local_sets[pbb->index - (INVALID_BLOCK + 1)] = 0;
			}

		      /* Add it to the queue.  */
		      if (pbb->aux == NULL)
			{
			  *qtail++ = pbb;
			  if (qtail == qend)
			    qtail = queue;
			  pbb->aux = pbb;
			}
		    }
		  continue;
		}
	    } /* end of failure_strategy_required */

	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
	}

      /* Queue all predecessors of BB so that we may re-examine
	 their live_at_end.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  basic_block pb = e->src;
	  if (pb->aux == NULL)
	    {
	      *qtail++ = pb;
	      if (qtail == qend)
		qtail = queue;
	      pb->aux = pb;
	    }
	}
    }

  FREE_REG_SET (tmp);
  FREE_REG_SET (new_live_at_end);
  FREE_REG_SET (invalidated_by_call);
  FREE_REG_SET (registers_made_dead);

  /* Release the per-block local/cond-local sets; when BLOCKS_OUT is
     given, only blocks marked there were ever allocated.  */
  if (blocks_out)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
	{
	  basic_block bb = BASIC_BLOCK (i);
	  FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
	  FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
	});
    }
  else
    {
      FOR_EACH_BB (bb)
	{
	  FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
	  FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
	}
    }

  free (block_accesses);
  free (queue);
  free (cond_local_sets);
  free (local_sets);
}
1430 1431


1432
/* This structure is used to pass parameters to and from the
   the function find_regno_partial(). It is used to pass in the
   register number we are looking, as well as to return any rtx
   we find.  */

typedef struct {
  unsigned regno_to_find;
  rtx retval;
} find_regno_partial_param;


/* for_each_rtx callback: look for a partial use (extract, strict low
   part, or subreg) of the register number given in DATA.  On a match,
   store the inner REG in the parameter block and stop the walk by
   returning 1; otherwise return 0 to keep walking.  */
static int
find_regno_partial (rtx *ptr, void *data)
{
  find_regno_partial_param *param = (find_regno_partial_param *) data;
  rtx x = *ptr;
  rtx inner;

  param->retval = NULL_RTX;

  if (x == NULL_RTX)
    return 0;

  /* Pick out the register operand for the forms that denote a
     partial-register reference.  */
  switch (GET_CODE (x))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      inner = XEXP (x, 0);
      break;

    case SUBREG:
      inner = SUBREG_REG (x);
      break;

    default:
      return 0;
    }

  if (REG_P (inner) && REGNO (inner) == param->regno_to_find)
    {
      param->retval = inner;
      return 1;
    }

  return 0;
}

/* Process all immediate successors of the entry block looking for pseudo
   registers which are live on entry. Find all of those whose first
   instance is a partial register reference of some kind, and initialize
   them to 0 after the entry block.  This will prevent bit sets within
   registers whose value is unknown, and may contain some kind of sticky
   bits we don't want.

   Returns nonzero if any initializing move was inserted (in which case
   the pending edge insertions have been committed).  */

int
initialize_uninitialized_subregs (void)
{
  rtx insn;
  edge e;
  unsigned reg, did_something = 0;
  find_regno_partial_param param;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      basic_block bb = e->dest;
      regset map = bb->global_live_at_start;
      reg_set_iterator rsi;

      /* Only pseudos can be uninitialized; skip the hard registers.  */
      EXECUTE_IF_SET_IN_REG_SET (map, FIRST_PSEUDO_REGISTER, reg, rsi)
	{
	  int uid = REGNO_FIRST_UID (reg);
	  rtx i;

	  /* Find an insn which mentions the register we are looking for.
	     It's preferable to have an instance of the register's rtl since
	     there may be various flags set which we need to duplicate.
	     If we can't find it, it's probably an automatic whose initial
	     value doesn't matter, or hopefully something we don't care about.  */
	  for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
	    ;
	  if (i != NULL_RTX)
	    {
	      /* Found the insn, now get the REG rtx, if we can.  */
	      param.regno_to_find = reg;
	      for_each_rtx (&i, find_regno_partial, &param);
	      if (param.retval != NULL_RTX)
		{
		  /* Emit "retval = 0" as a pending insertion on this
		     entry edge; committed below once we know we did
		     something.  */
		  start_sequence ();
		  emit_move_insn (param.retval,
				  CONST0_RTX (GET_MODE (param.retval)));
		  insn = get_insns ();
		  end_sequence ();
		  insert_insn_on_edge (insn, e);
		  did_something = 1;
		}
	    }
	}
    }

  if (did_something)
    commit_edge_insertions ();
  return did_something;
}

1542 1543

/* Subroutines of life analysis.  */
1544

1545
/* Allocate the permanent data structures that represent the results
1546
   of life analysis.  */
1547

1548
static void
1549
allocate_bb_life_data (void)
1550
{
1551
  basic_block bb;
Kazu Hirata committed
1552

1553
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1554
    {
1555 1556
      bb->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      bb->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1557
    }
1558

1559
  regs_live_at_setjmp = ALLOC_REG_SET (&reg_obstack);
1560
}
1561

1562
void
1563
allocate_reg_life_data (void)
1564 1565 1566
{
  int i;

1567
  max_regno = max_reg_num ();
1568
  gcc_assert (!reg_deaths);
1569
  reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
1570

1571 1572 1573
  /* Recalculate the register space, in case it has grown.  Old style
     vector oriented regsets would set regset_{size,bytes} here also.  */
  allocate_reg_info (max_regno, FALSE, FALSE);
1574

1575 1576 1577
  /* Reset all the data we'll collect in propagate_block and its
     subroutines.  */
  for (i = 0; i < max_regno; i++)
1578
    {
1579 1580 1581 1582 1583
      REG_N_SETS (i) = 0;
      REG_N_REFS (i) = 0;
      REG_N_DEATHS (i) = 0;
      REG_N_CALLS_CROSSED (i) = 0;
      REG_LIVE_LENGTH (i) = 0;
1584
      REG_FREQ (i) = 0;
1585
      REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
1586
    }
1587
}
1588

1589
/* Delete dead instructions for propagate_block.  */
1590

1591
static void
1592
propagate_block_delete_insn (rtx insn)
1593 1594
{
  rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
1595

1596 1597 1598 1599
  /* If the insn referred to a label, and that label was attached to
     an ADDR_VEC, it's safe to delete the ADDR_VEC.  In fact, it's
     pretty much mandatory to delete it, because the ADDR_VEC may be
     referencing labels that no longer exist.
1600

1601 1602 1603
     INSN may reference a deleted label, particularly when a jump
     table has been optimized into a direct jump.  There's no
     real good way to fix up the reference to the deleted label
1604
     when the label is deleted, so we just allow it here.  */
1605

1606
  if (inote && LABEL_P (inote))
1607
    {
1608 1609
      rtx label = XEXP (inote, 0);
      rtx next;
1610

1611 1612 1613 1614 1615
      /* The label may be forced if it has been put in the constant
	 pool.  If that is the only use we must discard the table
	 jump following it, but not the label itself.  */
      if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
	  && (next = next_nonnote_insn (label)) != NULL
1616
	  && JUMP_P (next)
1617 1618
	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1619
	{
1620 1621 1622 1623
	  rtx pat = PATTERN (next);
	  int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
	  int len = XVECLEN (pat, diff_vec_p);
	  int i;
1624

1625 1626
	  for (i = 0; i < len; i++)
	    LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
1627

1628 1629
	  delete_insn_and_edges (next);
	  ndead++;
1630 1631 1632
	}
    }

1633 1634
  delete_insn_and_edges (insn);
  ndead++;
1635
}
1636

1637 1638
/* Delete dead libcalls for propagate_block.  Return the insn
   before the libcall.  */
1639

1640
static rtx
1641
propagate_block_delete_libcall (rtx insn, rtx note)
1642 1643 1644
{
  rtx first = XEXP (note, 0);
  rtx before = PREV_INSN (first);
1645

1646 1647
  delete_insn_chain_and_edges (first, insn);
  ndead++;
1648
  return before;
1649 1650
}

1651 1652 1653
/* Update the life-status of regs for one insn.  Return the previous insn.  */

rtx
1654
propagate_one_insn (struct propagate_block_info *pbi, rtx insn)
1655
{
1656 1657 1658 1659 1660
  rtx prev = PREV_INSN (insn);
  int flags = pbi->flags;
  int insn_is_dead = 0;
  int libcall_is_dead = 0;
  rtx note;
1661
  unsigned i;
1662

1663 1664
  if (! INSN_P (insn))
    return prev;
1665

1666 1667 1668 1669 1670 1671 1672
  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
  if (flags & PROP_SCAN_DEAD_CODE)
    {
      insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
      libcall_is_dead = (insn_is_dead && note != 0
			 && libcall_dead_p (pbi, note, insn));
    }
1673

1674 1675 1676
  /* If an instruction consists of just dead store(s) on final pass,
     delete it.  */
  if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
1677
    {
1678 1679 1680 1681 1682 1683 1684 1685 1686 1687 1688 1689 1690 1691
      /* If we're trying to delete a prologue or epilogue instruction
	 that isn't flagged as possibly being dead, something is wrong.
	 But if we are keeping the stack pointer depressed, we might well
	 be deleting insns that are used to compute the amount to update
	 it by, so they are fine.  */
      if (reload_completed
	  && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
		&& (TYPE_RETURNS_STACK_DEPRESSED
		    (TREE_TYPE (current_function_decl))))
	  && (((HAVE_epilogue || HAVE_prologue)
	       && prologue_epilogue_contains (insn))
	      || (HAVE_sibcall_epilogue
		  && sibcall_epilogue_contains (insn)))
	  && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
1692
	fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);
1693

1694
      /* Record sets.  Do this even for dead instructions, since they
1695 1696 1697 1698
	 would have killed the values if they hadn't been deleted.  To
	 be consistent, we also have to emit a clobber when we delete
	 an insn that clobbers a live register.  */
      pbi->flags |= PROP_DEAD_INSN;
1699
      mark_set_regs (pbi, PATTERN (insn), insn);
1700
      pbi->flags &= ~PROP_DEAD_INSN;
1701

1702 1703 1704 1705
      /* CC0 is now known to be dead.  Either this insn used it,
	 in which case it doesn't anymore, or clobbered it,
	 so the next insn can't use it.  */
      pbi->cc0_live = 0;
1706

1707
      if (libcall_is_dead)
1708
	prev = propagate_block_delete_libcall (insn, note);
1709 1710 1711
      else
	{

1712 1713 1714 1715
	/* If INSN contains a RETVAL note and is dead, but the libcall
	   as a whole is not dead, then we want to remove INSN, but
	   not the whole libcall sequence.

1716
	   However, we need to also remove the dangling REG_LIBCALL
1717 1718
	   note so that we do not have mis-matched LIBCALL/RETVAL
	   notes.  In theory we could find a new location for the
1719
	   REG_RETVAL note, but it hardly seems worth the effort.
1720 1721

	   NOTE at this point will be the RETVAL note if it exists.  */
1722 1723 1724
	  if (note)
	    {
	      rtx libcall_note;
1725

1726 1727 1728 1729
	      libcall_note
		= find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      remove_note (XEXP (note, 0), libcall_note);
	    }
1730 1731

	  /* Similarly if INSN contains a LIBCALL note, remove the
1732
	     dangling REG_RETVAL note.  */
1733 1734 1735 1736 1737 1738 1739 1740 1741 1742 1743
	  note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
	  if (note)
	    {
	      rtx retval_note;

	      retval_note
		= find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      remove_note (XEXP (note, 0), retval_note);
	    }

	  /* Now delete INSN.  */
1744 1745
	  propagate_block_delete_insn (insn);
	}
1746

1747 1748
      return prev;
    }
1749

1750 1751 1752 1753
  /* See if this is an increment or decrement that can be merged into
     a following memory address.  */
#ifdef AUTO_INC_DEC
  {
1754
    rtx x = single_set (insn);
1755

1756 1757 1758
    /* Does this instruction increment or decrement a register?  */
    if ((flags & PROP_AUTOINC)
	&& x != 0
1759
	&& REG_P (SET_DEST (x))
1760 1761 1762 1763 1764 1765 1766 1767 1768 1769 1770 1771
	&& (GET_CODE (SET_SRC (x)) == PLUS
	    || GET_CODE (SET_SRC (x)) == MINUS)
	&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
	&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	/* Ok, look for a following memory ref we can combine with.
	   If one is found, change the memory ref to a PRE_INC
	   or PRE_DEC, cancel this insn, and return 1.
	   Return 0 if nothing has been done.  */
	&& try_pre_increment_1 (pbi, insn))
      return prev;
  }
#endif /* AUTO_INC_DEC */
1772

1773
  CLEAR_REG_SET (pbi->new_set);
1774

1775 1776 1777 1778
  /* If this is not the final pass, and this insn is copying the value of
     a library call and it's dead, don't scan the insns that perform the
     library call, so that the call's arguments are not marked live.  */
  if (libcall_is_dead)
1779
    {
1780 1781
      /* Record the death of the dest reg.  */
      mark_set_regs (pbi, PATTERN (insn), insn);
1782

1783 1784
      insn = XEXP (note, 0);
      return PREV_INSN (insn);
1785
    }
1786 1787 1788 1789 1790
  else if (GET_CODE (PATTERN (insn)) == SET
	   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
	   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
	   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
	   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1791 1792 1793 1794 1795 1796 1797 1798 1799 1800 1801 1802
    {
      /* We have an insn to pop a constant amount off the stack.
         (Such insns use PLUS regardless of the direction of the stack,
         and any insn to adjust the stack by a constant is always a pop
	 or part of a push.)
         These insns, if not dead stores, have no effect on life, though
         they do have an effect on the memory stores we are tracking.  */
      invalidate_mems_from_set (pbi, stack_pointer_rtx);
      /* Still, we need to update local_set, lest ifcvt.c:dead_or_predicable
	 concludes that the stack pointer is not modified.  */
      mark_set_regs (pbi, PATTERN (insn), insn);
    }
1803 1804 1805 1806 1807
  else
    {
      /* Any regs live at the time of a call instruction must not go
	 in a register clobbered by calls.  Find all regs now live and
	 record this for them.  */
1808

1809
      if (CALL_P (insn) && (flags & PROP_REG_INFO))
1810 1811 1812 1813 1814
	{
	  reg_set_iterator rsi;
	  EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
	    REG_N_CALLS_CROSSED (i)++;
	}
1815

1816 1817 1818
      /* Record sets.  Do this even for dead instructions, since they
	 would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);
1819

1820
      if (CALL_P (insn))
1821
	{
1822 1823
	  regset live_at_end;
	  bool sibcall_p;
1824
	  rtx note, cond;
1825
	  int i;
1826

1827 1828 1829
	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));
1830

1831 1832 1833
	  /* Non-constant calls clobber memory, constant calls do not
	     clobber memory, though they may clobber outgoing arguments
	     on the stack.  */
1834 1835 1836 1837 1838
	  if (! CONST_OR_PURE_CALL_P (insn))
	    {
	      free_EXPR_LIST_list (&pbi->mem_set_list);
	      pbi->mem_set_list_len = 0;
	    }
Kazu Hirata committed
1839
	  else
1840
	    invalidate_mems_from_set (pbi, stack_pointer_rtx);
1841

1842 1843 1844 1845 1846 1847 1848
	  /* There may be extra registers to be clobbered.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	      mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
			  cond, insn, pbi->flags);
Kazu Hirata committed
1849

1850
	  /* Calls change all call-used and global registers; sibcalls do not
1851 1852
	     clobber anything that must be preserved at end-of-function,
	     except for return values.  */
1853 1854 1855

	  sibcall_p = SIBLING_CALL_P (insn);
	  live_at_end = EXIT_BLOCK_PTR->global_live_at_start;
1856
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1857
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
1858 1859
		&& ! (sibcall_p
		      && REGNO_REG_SET_P (live_at_end, i)
1860 1861 1862
		      && ! refers_to_regno_p (i, i+1,
					      current_function_return_rtx,
					      (rtx *) 0)))
1863
	      {
1864
		enum rtx_code code = global_regs[i] ? SET : CLOBBER;
1865
		/* We do not want REG_UNUSED notes for these registers.  */
1866
		mark_set_1 (pbi, code, regno_reg_rtx[i], cond, insn,
1867 1868 1869
			    pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
	      }
	}
1870

1871 1872 1873 1874
      /* If an insn doesn't use CC0, it becomes dead since we assume
	 that every insn clobbers it.  So show it dead here;
	 mark_used_regs will set it live if it is referenced.  */
      pbi->cc0_live = 0;
1875

1876 1877 1878
      /* Record uses.  */
      if (! insn_is_dead)
	mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
1879

1880 1881 1882 1883 1884
      /* Sometimes we may have inserted something before INSN (such as a move)
	 when we make an auto-inc.  So ensure we will scan those insns.  */
#ifdef AUTO_INC_DEC
      prev = PREV_INSN (insn);
#endif
1885

1886
      if (! insn_is_dead && CALL_P (insn))
1887
	{
1888
	  int i;
1889
	  rtx note, cond;
1890

1891 1892 1893
	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));
1894

1895 1896
	  /* Calls use their arguments, and may clobber memory which
	     address involves some register.  */
1897 1898 1899
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
1900 1901 1902
	    /* We find USE or CLOBBER entities in a FUNCTION_USAGE list: both
	       of which mark_used_regs knows how to handle.  */
	    mark_used_regs (pbi, XEXP (XEXP (note, 0), 0), cond, insn);
1903

1904
	  /* The stack ptr is used (honorarily) by a CALL insn.  */
1905 1906 1907
	  if ((flags & PROP_REG_INFO)
	      && !REGNO_REG_SET_P (pbi->reg_live, STACK_POINTER_REGNUM))
	    reg_deaths[STACK_POINTER_REGNUM] = pbi->insn_num;
1908
	  SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
1909

1910 1911 1912 1913
	  /* Calls may also reference any of the global registers,
	     so they are made live.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
1914
	      mark_used_reg (pbi, regno_reg_rtx[i], cond, insn);
1915
	}
1916 1917
    }

1918
  pbi->insn_num++;
1919 1920

  return prev;
1921 1922
}

1923 1924 1925
/* Initialize a propagate_block_info struct for public consumption.
   Note that the structure itself is opaque to this file, but that
   the user can use the regsets provided here.  */
1926

1927
struct propagate_block_info *
1928 1929
init_propagate_block_info (basic_block bb, regset live, regset local_set,
			   regset cond_local_set, int flags)
1930
{
1931
  struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
1932

1933 1934 1935 1936 1937 1938 1939 1940
  pbi->bb = bb;
  pbi->reg_live = live;
  pbi->mem_set_list = NULL_RTX;
  pbi->mem_set_list_len = 0;
  pbi->local_set = local_set;
  pbi->cond_local_set = cond_local_set;
  pbi->cc0_live = 0;
  pbi->flags = flags;
1941
  pbi->insn_num = 0;
Kazu Hirata committed
1942

1943
  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
1944
    pbi->reg_next_use = xcalloc (max_reg_num (), sizeof (rtx));
1945
  else
1946
    pbi->reg_next_use = NULL;
Andrew MacLeod committed
1947

1948
  pbi->new_set = BITMAP_ALLOC (NULL);
1949

1950 1951 1952
#ifdef HAVE_conditional_execution
  pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
				       free_reg_cond_life_info);
1953
  pbi->reg_cond_reg = BITMAP_ALLOC (NULL);
1954

1955 1956 1957
  /* If this block ends in a conditional branch, for each register
     live from one side of the branch and not the other, record the
     register as conditionally dead.  */
1958
  if (JUMP_P (BB_END (bb))
1959
      && any_condjump_p (BB_END (bb)))
1960
    {
1961
      regset diff = ALLOC_REG_SET (&reg_obstack);
1962
      basic_block bb_true, bb_false;
1963
      unsigned i;
1964

1965
      /* Identify the successor blocks.  */
1966
      bb_true = EDGE_SUCC (bb, 0)->dest;
1967
      if (!single_succ_p (bb))
1968
	{
1969
	  bb_false = EDGE_SUCC (bb, 1)->dest;
Kazu Hirata committed
1970

1971
	  if (EDGE_SUCC (bb, 0)->flags & EDGE_FALLTHRU)
1972 1973 1974 1975 1976
	    {
	      basic_block t = bb_false;
	      bb_false = bb_true;
	      bb_true = t;
	    }
1977
	  else
1978
	    gcc_assert (EDGE_SUCC (bb, 1)->flags & EDGE_FALLTHRU);
1979 1980 1981 1982
	}
      else
	{
	  /* This can happen with a conditional jump to the next insn.  */
1983
	  gcc_assert (JUMP_LABEL (BB_END (bb)) == BB_HEAD (bb_true));
1984

1985 1986 1987
	  /* Simplest way to do nothing.  */
	  bb_false = bb_true;
	}
1988

1989
      /* Compute which register lead different lives in the successors.  */
1990 1991 1992 1993 1994
      bitmap_xor (diff, bb_true->global_live_at_start,
		  bb_false->global_live_at_start);
      
      if (!bitmap_empty_p (diff))
	  {
1995
	  /* Extract the condition from the branch.  */
1996
	  rtx set_src = SET_SRC (pc_set (BB_END (bb)));
1997
	  rtx cond_true = XEXP (set_src, 0);
1998
	  rtx reg = XEXP (cond_true, 0);
1999
 	  enum rtx_code inv_cond;
2000

2001 2002
	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);
2003

2004
	  /* We can only track conditional lifetimes if the condition is
2005 2006 2007 2008 2009 2010
 	     in the form of a reversible comparison of a register against
 	     zero.  If the condition is more complex than that, then it is
 	     safe not to record any information.  */
 	  inv_cond = reversed_comparison_code (cond_true, BB_END (bb));
 	  if (inv_cond != UNKNOWN
 	      && REG_P (reg)
2011 2012 2013
	      && XEXP (cond_true, 1) == const0_rtx)
	    {
	      rtx cond_false
2014
		= gen_rtx_fmt_ee (inv_cond,
2015 2016
				  GET_MODE (cond_true), XEXP (cond_true, 0),
				  XEXP (cond_true, 1));
2017 2018
	      reg_set_iterator rsi;

2019 2020 2021 2022 2023 2024
	      if (GET_CODE (XEXP (set_src, 1)) == PC)
		{
		  rtx t = cond_false;
		  cond_false = cond_true;
		  cond_true = t;
		}
2025

2026
	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
2027

2028
	      /* For each such register, mark it conditionally dead.  */
2029 2030 2031 2032 2033 2034 2035 2036 2037 2038 2039 2040 2041 2042 2043 2044 2045 2046
	      EXECUTE_IF_SET_IN_REG_SET (diff, 0, i, rsi)
		{
		  struct reg_cond_life_info *rcli;
		  rtx cond;

		  rcli = xmalloc (sizeof (*rcli));

		  if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
		    cond = cond_false;
		  else
		    cond = cond_true;
		  rcli->condition = cond;
		  rcli->stores = const0_rtx;
		  rcli->orig_condition = cond;

		  splay_tree_insert (pbi->reg_cond_dead, i,
				     (splay_tree_value) rcli);
		}
2047
	    }
2048 2049
	}

2050
      FREE_REG_SET (diff);
2051
    }
2052 2053 2054 2055 2056 2057 2058 2059 2060 2061
#endif

  /* If this block has no successors, any stores to the frame that aren't
     used later in the block are dead.  So make a pass over the block
     recording any such that are made and show them dead at the end.  We do
     a very conservative and simple job here.  */
  if (optimize
      && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	    && (TYPE_RETURNS_STACK_DEPRESSED
		(TREE_TYPE (current_function_decl))))
2062
      && (flags & PROP_SCAN_DEAD_STORES)
2063
      && (EDGE_COUNT (bb->succs) == 0
2064 2065
	  || (single_succ_p (bb)
	      && single_succ (bb) == EXIT_BLOCK_PTR
2066
	      && ! current_function_calls_eh_return)))
2067
    {
2068
      rtx insn, set;
2069
      for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
2070
	if (NONJUMP_INSN_P (insn)
2071
	    && (set = single_set (insn))
2072
	    && MEM_P (SET_DEST (set)))
2073 2074 2075 2076 2077 2078 2079 2080 2081 2082
	  {
	    rtx mem = SET_DEST (set);
	    rtx canon_mem = canon_rtx (mem);

	    if (XEXP (canon_mem, 0) == frame_pointer_rtx
		|| (GET_CODE (XEXP (canon_mem, 0)) == PLUS
		    && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
		    && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
	      add_to_mem_set_list (pbi, canon_mem);
	  }
2083 2084
    }

2085
  return pbi;
2086 2087
}

2088
/* Release a propagate_block_info struct.  */
2089

2090
void
2091
free_propagate_block_info (struct propagate_block_info *pbi)
2092
{
2093
  free_EXPR_LIST_list (&pbi->mem_set_list);
2094

2095
  BITMAP_FREE (pbi->new_set);
2096

2097 2098
#ifdef HAVE_conditional_execution
  splay_tree_delete (pbi->reg_cond_dead);
2099
  BITMAP_FREE (pbi->reg_cond_reg);
2100
#endif
2101

2102 2103 2104
  if (pbi->flags & PROP_REG_INFO)
    {
      int num = pbi->insn_num;
2105
      unsigned i;
2106
      reg_set_iterator rsi;
2107

2108 2109 2110 2111 2112
      EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
	{
	  REG_LIVE_LENGTH (i) += num - reg_deaths[i];
	  reg_deaths[i] = 0;
	}
2113
    }
2114 2115
  if (pbi->reg_next_use)
    free (pbi->reg_next_use);
2116

2117 2118
  free (pbi);
}
2119

2120 2121
/* Compute the registers live at the beginning of a basic block BB from
   those live at the end.
Kazu Hirata committed
2122

2123 2124
   When called, REG_LIVE contains those live at the end.  On return, it
   contains those live at the beginning.
2125

2126 2127 2128 2129 2130 2131 2132 2133 2134
   LOCAL_SET, if non-null, will be set with all registers killed
   unconditionally by this basic block.
   Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
   killed conditionally by this basic block.  If there is any unconditional
   set of a register, then the corresponding bit will be set in LOCAL_SET
   and cleared in COND_LOCAL_SET.
   It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set.  In this
   case, the resulting set will be equal to the union of the two sets that
   would otherwise be computed.
2135

2136
   Return nonzero if an INSN is deleted (i.e. by dead code removal).  */
2137

2138
int
2139 2140
propagate_block (basic_block bb, regset live, regset local_set,
		 regset cond_local_set, int flags)
2141
{
2142 2143 2144
  struct propagate_block_info *pbi;
  rtx insn, prev;
  int changed;
2145

2146
  pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
2147

2148
  if (flags & PROP_REG_INFO)
2149
    {
2150
      unsigned i;
2151
      reg_set_iterator rsi;
2152

2153 2154
      /* Process the regs live at the end of the block.
	 Mark them as not local to any one basic block.  */
2155 2156
      EXECUTE_IF_SET_IN_REG_SET (live, 0, i, rsi)
	REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2157
    }
2158

2159
  /* Scan the block an insn at a time from end to beginning.  */
2160

2161
  changed = 0;
2162
  for (insn = BB_END (bb); ; insn = prev)
2163 2164 2165 2166
    {
      /* If this is a call to `setjmp' et al, warn if any
	 non-volatile datum is live.  */
      if ((flags & PROP_REG_INFO)
2167
	  && CALL_P (insn)
2168 2169
	  && find_reg_note (insn, REG_SETJMP, NULL))
	IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
2170

2171
      prev = propagate_one_insn (pbi, insn);
2172 2173 2174 2175
      if (!prev)
        changed |= insn != get_insns ();
      else
        changed |= NEXT_INSN (prev) != insn;
2176

2177
      if (insn == BB_HEAD (bb))
2178
	break;
2179 2180
    }

2181 2182 2183
  free_propagate_block_info (pbi);

  return changed;
2184
}
2185 2186 2187 2188 2189

/* Return 1 if X (the body of an insn, or part of it) is just dead stores
   (SET expressions whose destinations are registers dead after the insn).
   NEEDED is the regset that says which regs are alive after the insn.

2190
   Unless CALL_OK is nonzero, an insn is needed if it contains a CALL.
2191

2192 2193
   If X is the entire body of an insn, NOTES contains the reg notes
   pertaining to the insn.  */
2194 2195

static int
2196 2197
insn_dead_p (struct propagate_block_info *pbi, rtx x, int call_ok,
	     rtx notes ATTRIBUTE_UNUSED)
2198
{
2199
  enum rtx_code code = GET_CODE (x);
2200

2201 2202 2203 2204
  /* Don't eliminate insns that may trap.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;

2205
#ifdef AUTO_INC_DEC
2206 2207 2208
  /* As flow is invoked after combine, we must take existing AUTO_INC
     expressions into account.  */
  for (; notes; notes = XEXP (notes, 1))
2209
    {
2210
      if (REG_NOTE_KIND (notes) == REG_INC)
2211
	{
2212
	  int regno = REGNO (XEXP (notes, 0));
2213

2214 2215 2216 2217
	  /* Don't delete insns to set global regs.  */
	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	      || REGNO_REG_SET_P (pbi->reg_live, regno))
	    return 0;
2218
	}
2219
    }
2220
#endif
2221

2222 2223
  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */
2224

2225
  if (code == SET)
2226
    {
2227
      rtx r = SET_DEST (x);
2228

2229 2230 2231 2232
#ifdef HAVE_cc0
      if (GET_CODE (r) == CC0)
	return ! pbi->cc0_live;
#endif
2233

2234 2235 2236 2237 2238 2239
      /* A SET that is a subroutine call cannot be dead.  */
      if (GET_CODE (SET_SRC (x)) == CALL)
	{
	  if (! call_ok)
	    return 0;
	}
2240

2241 2242 2243
      /* Don't eliminate loads from volatile memory or volatile asms.  */
      else if (volatile_refs_p (SET_SRC (x)))
	return 0;
2244

2245
      if (MEM_P (r))
2246
	{
2247
	  rtx temp, canon_r;
2248

2249 2250
	  if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
	    return 0;
2251

2252
	  canon_r = canon_rtx (r);
2253

2254 2255 2256 2257 2258 2259 2260
	  /* Walk the set of memory locations we are currently tracking
	     and see if one is an identical match to this memory location.
	     If so, this memory write is dead (remember, we're walking
	     backwards from the end of the block to the start).  Since
	     rtx_equal_p does not check the alias set or flags, we also
	     must have the potential for them to conflict (anti_dependence).  */
	  for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
2261
	    if (anti_dependence (r, XEXP (temp, 0)))
2262 2263
	      {
		rtx mem = XEXP (temp, 0);
2264

2265 2266 2267 2268
		if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
		    && (GET_MODE_SIZE (GET_MODE (canon_r))
			<= GET_MODE_SIZE (GET_MODE (mem))))
		  return 1;
2269

2270 2271 2272 2273 2274 2275 2276 2277 2278 2279 2280 2281
#ifdef AUTO_INC_DEC
		/* Check if memory reference matches an auto increment. Only
		   post increment/decrement or modify are valid.  */
		if (GET_MODE (mem) == GET_MODE (r)
		    && (GET_CODE (XEXP (mem, 0)) == POST_DEC
			|| GET_CODE (XEXP (mem, 0)) == POST_INC
			|| GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
		    && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
		    && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
		  return 1;
#endif
	      }
2282
	}
2283
      else
2284
	{
2285 2286 2287 2288
	  while (GET_CODE (r) == SUBREG
		 || GET_CODE (r) == STRICT_LOW_PART
		 || GET_CODE (r) == ZERO_EXTRACT)
	    r = XEXP (r, 0);
2289

2290
	  if (REG_P (r))
2291
	    {
2292
	      int regno = REGNO (r);
2293

2294 2295 2296
	      /* Obvious.  */
	      if (REGNO_REG_SET_P (pbi->reg_live, regno))
		return 0;
2297

2298 2299 2300 2301
	      /* If this is a hard register, verify that subsequent
		 words are not needed.  */
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
2302
		  int n = hard_regno_nregs[regno][GET_MODE (r)];
2303

2304 2305 2306 2307
		  while (--n > 0)
		    if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
		      return 0;
		}
2308

2309 2310 2311
	      /* Don't delete insns to set global regs.  */
	      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		return 0;
2312

2313 2314 2315
	      /* Make sure insns to set the stack pointer aren't deleted.  */
	      if (regno == STACK_POINTER_REGNUM)
		return 0;
2316

2317 2318 2319 2320 2321 2322 2323 2324 2325 2326 2327 2328 2329
	      /* ??? These bits might be redundant with the force live bits
		 in calculate_global_regs_live.  We would delete from
		 sequential sets; whether this actually affects real code
		 for anything but the stack pointer I don't know.  */
	      /* Make sure insns to set the frame pointer aren't deleted.  */
	      if (regno == FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      if (regno == HARD_FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#endif
2330

2331 2332 2333 2334 2335 2336 2337
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      /* Make sure insns to set arg pointer are never deleted
		 (if the arg pointer isn't fixed, there will be a USE
		 for it, so we can treat it normally).  */
	      if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
		return 0;
#endif
2338

2339 2340 2341 2342 2343
	      /* Otherwise, the set is dead.  */
	      return 1;
	    }
	}
    }
2344

2345 2346 2347 2348 2349 2350 2351
  /* If performing several activities, insn is dead if each activity
     is individually dead.  Also, CLOBBERs and USEs can be ignored; a
     CLOBBER or USE that's inside a PARALLEL doesn't make the insn
     worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);
2352

2353 2354 2355 2356 2357
      for (i--; i >= 0; i--)
	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
	    && GET_CODE (XVECEXP (x, 0, i)) != USE
	    && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
	  return 0;
2358

2359 2360
      return 1;
    }
2361

2362
  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
2363 2364 2365
     is not necessarily true for hard registers until after reload.  */
  else if (code == CLOBBER)
    {
2366
      if (REG_P (XEXP (x, 0))
2367 2368 2369 2370 2371 2372 2373 2374 2375 2376 2377 2378
	  && (REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
	      || reload_completed)
	  && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
	return 1;
    }

  /* ??? A base USE is a historical relic.  It ought not be needed anymore.
     Instances where it is still used are either (1) temporary and the USE
     escaped the pass, (2) cruft and the USE need not be emitted anymore,
     or (3) hiding bugs elsewhere that are not properly representing data
     flow.  */

2379 2380
  return 0;
}
2381

2382 2383 2384 2385 2386 2387
/* If INSN is the last insn in a libcall, and assuming INSN is dead,
   return 1 if the entire library call is dead.
   This is true if INSN copies a register (hard or pseudo)
   and if the hard return reg of the call insn is dead.
   (The caller should have tested the destination of the SET inside
   INSN already for death.)
2388

2389 2390 2391 2392
   If this insn doesn't just copy a register, then we don't
   have an ordinary libcall.  In that case, cse could not have
   managed to substitute the source for the dest later on,
   so we can assume the libcall is dead.
2393

2394 2395
   PBI is the block info giving pseudoregs live before this insn.
   NOTE is the REG_RETVAL note of the insn.  */
2396

2397
static int
2398
libcall_dead_p (struct propagate_block_info *pbi, rtx note, rtx insn)
2399 2400
{
  rtx x = single_set (insn);
2401

2402 2403
  if (x)
    {
2404
      rtx r = SET_SRC (x);
2405

2406
      if (REG_P (r) || GET_CODE (r) == SUBREG)
2407 2408 2409
	{
	  rtx call = XEXP (note, 0);
	  rtx call_pat;
2410
	  int i;
2411

2412
	  /* Find the call insn.  */
2413
	  while (call != insn && !CALL_P (call))
2414
	    call = NEXT_INSN (call);
2415

2416 2417 2418 2419
	  /* If there is none, do nothing special,
	     since ordinary death handling can understand these insns.  */
	  if (call == insn)
	    return 0;
2420

2421 2422 2423 2424
	  /* See if the hard reg holding the value is dead.
	     If this is a PARALLEL, find the call within it.  */
	  call_pat = PATTERN (call);
	  if (GET_CODE (call_pat) == PARALLEL)
2425
	    {
2426 2427 2428 2429 2430 2431 2432 2433 2434 2435 2436 2437
	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
		  break;

	      /* This may be a library call that is returning a value
		 via invisible pointer.  Do nothing special, since
		 ordinary death handling can understand these insns.  */
	      if (i < 0)
		return 0;

	      call_pat = XVECEXP (call_pat, 0, i);
2438 2439
	    }

2440 2441 2442 2443 2444 2445 2446 2447 2448 2449 2450
	  if (! insn_dead_p (pbi, call_pat, 1, REG_NOTES (call)))
	    return 0;

	  while ((insn = PREV_INSN (insn)) != call)
	    {
	      if (! INSN_P (insn))
		continue;
	      if (! insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn)))
		return 0;
	    }
	  return 1;
2451 2452
	}
    }
2453
  return 0;
2454
}
2455

2456 2457 2458
/* 1 if register REGNO was alive at a place where `setjmp' was called
   and was set more than once or is an argument.
   Such regs may be clobbered by `longjmp'.  */
2459

2460
int
2461
regno_clobbered_at_setjmp (int regno)
2462
{
2463
  if (n_basic_blocks == 0)
2464 2465 2466
    return 0;

  return ((REG_N_SETS (regno) > 1
2467
	   || REGNO_REG_SET_P (ENTRY_BLOCK_PTR->global_live_at_end, regno))
2468 2469 2470 2471 2472 2473
	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
}

/* Add MEM to PBI->MEM_SET_LIST.  MEM should be canonical.  Respect the
   maximal list size; look for overlaps in mode and select the largest.  */
static void
2474
add_to_mem_set_list (struct propagate_block_info *pbi, rtx mem)
2475
{
2476 2477 2478 2479 2480 2481 2482 2483
  rtx i;

  /* We don't know how large a BLKmode store is, so we must not
     take them into consideration.  */
  if (GET_MODE (mem) == BLKmode)
    return;

  for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
2484
    {
2485 2486
      rtx e = XEXP (i, 0);
      if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
2487
	{
2488
	  if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
2489
	    {
2490 2491 2492 2493 2494 2495 2496 2497
#ifdef AUTO_INC_DEC
	      /* If we must store a copy of the mem, we can just modify
		 the mode of the stored copy.  */
	      if (pbi->flags & PROP_AUTOINC)
	        PUT_MODE (e, GET_MODE (mem));
	      else
#endif
	        XEXP (i, 0) = mem;
2498
	    }
2499
	  return;
2500 2501
	}
    }
2502

2503 2504 2505 2506 2507 2508 2509 2510 2511 2512 2513
  if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
    {
#ifdef AUTO_INC_DEC
      /* Store a copy of mem, otherwise the address may be
	 scrogged by find_auto_inc.  */
      if (pbi->flags & PROP_AUTOINC)
	mem = shallow_copy_rtx (mem);
#endif
      pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
      pbi->mem_set_list_len++;
    }
2514 2515
}

2516 2517 2518
/* INSN references memory, possibly using autoincrement addressing modes.
   Find any entries on the mem_set_list that need to be invalidated due
   to an address change.  */
2519

2520
static int
2521
invalidate_mems_from_autoinc (rtx *px, void *data)
2522
{
2523 2524 2525
  rtx x = *px;
  struct propagate_block_info *pbi = data;

2526
  if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
2527 2528 2529 2530 2531 2532
    {
      invalidate_mems_from_set (pbi, XEXP (x, 0));
      return -1;
    }

  return 0;
2533
}
2534

2535 2536
/* EXP is a REG or MEM.  Remove any dependent entries from
   pbi->mem_set_list.  */
2537

2538
static void
2539
invalidate_mems_from_set (struct propagate_block_info *pbi, rtx exp)
2540 2541 2542 2543
{
  rtx temp = pbi->mem_set_list;
  rtx prev = NULL_RTX;
  rtx next;
2544

2545
  while (temp)
2546
    {
2547
      next = XEXP (temp, 1);
2548 2549 2550 2551 2552 2553
      if ((REG_P (exp) && reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
	  /* When we get an EXP that is a mem here, we want to check if EXP
	     overlaps the *address* of any of the mems in the list (i.e. not
	     whether the mems actually overlap; that's done elsewhere).  */
	  || (MEM_P (exp)
	      && reg_overlap_mentioned_p (exp, XEXP (XEXP (temp, 0), 0))))
2554
	{
2555 2556 2557 2558 2559 2560 2561
	  /* Splice this entry out of the list.  */
	  if (prev)
	    XEXP (prev, 1) = next;
	  else
	    pbi->mem_set_list = next;
	  free_EXPR_LIST_node (temp);
	  pbi->mem_set_list_len--;
2562 2563
	}
      else
2564 2565
	prev = temp;
      temp = next;
2566
    }
2567
}
2568

2569 2570
/* Process the registers that are set within X.  Their bits are set to
   1 in the regset DEAD, because they are dead prior to this insn.
2571

2572
   If INSN is nonzero, it is the insn being processed.
2573

2574
   FLAGS is the set of operations to perform.  */
2575

2576
static void
2577
mark_set_regs (struct propagate_block_info *pbi, rtx x, rtx insn)
2578
{
2579 2580 2581
  rtx cond = NULL_RTX;
  rtx link;
  enum rtx_code code;
2582
  int flags = pbi->flags;
2583

2584 2585 2586 2587 2588 2589 2590
  if (insn)
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      {
	if (REG_NOTE_KIND (link) == REG_INC)
	  mark_set_1 (pbi, SET, XEXP (link, 0),
		      (GET_CODE (x) == COND_EXEC
		       ? COND_EXEC_TEST (x) : NULL_RTX),
2591
		      insn, flags);
2592 2593 2594
      }
 retry:
  switch (code = GET_CODE (x))
2595
    {
2596
    case SET:
2597 2598
      if (GET_CODE (XEXP (x, 1)) == ASM_OPERANDS)
	flags |= PROP_ASM_SCAN;
2599
      /* Fall through */
2600
    case CLOBBER:
2601
      mark_set_1 (pbi, code, SET_DEST (x), cond, insn, flags);
2602
      return;
2603

2604 2605 2606 2607
    case COND_EXEC:
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;
2608

2609 2610
    case PARALLEL:
      {
2611 2612
	int i;

2613 2614 2615
	/* We must scan forwards.  If we have an asm, we need to set
	   the PROP_ASM_SCAN flag before scanning the clobbers.  */
	for (i = 0; i < XVECLEN (x, 0); i++)
2616 2617 2618 2619 2620
	  {
	    rtx sub = XVECEXP (x, 0, i);
	    switch (code = GET_CODE (sub))
	      {
	      case COND_EXEC:
2621
		gcc_assert (!cond);
2622

2623 2624
		cond = COND_EXEC_TEST (sub);
		sub = COND_EXEC_CODE (sub);
2625 2626 2627 2628 2629
		if (GET_CODE (sub) == SET)
		  goto mark_set;
		if (GET_CODE (sub) == CLOBBER)
		  goto mark_clob;
		break;
2630

2631
	      case SET:
2632 2633 2634
	      mark_set:
		if (GET_CODE (XEXP (sub, 1)) == ASM_OPERANDS)
		  flags |= PROP_ASM_SCAN;
2635
		/* Fall through */
2636
	      case CLOBBER:
2637 2638
	      mark_clob:
		mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, flags);
2639
		break;
2640

2641 2642 2643 2644
	      case ASM_OPERANDS:
		flags |= PROP_ASM_SCAN;
		break;

2645 2646 2647 2648 2649 2650
	      default:
		break;
	      }
	  }
	break;
      }
2651

2652 2653
    default:
      break;
2654 2655 2656
    }
}

2657 2658 2659 2660 2661
/* Process a single set, which appears in INSN.  REG (which may not
   actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
   being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
   If the set is conditional (because it appears in a COND_EXEC), COND
   will be the condition.

   PBI carries the per-block propagation state (live-register set,
   next-use table, local/conditional-local sets).  FLAGS is the PROP_*
   mask selecting which side information to record.  */

static void
mark_set_1 (struct propagate_block_info *pbi, enum rtx_code code, rtx reg, rtx cond, rtx insn, int flags)
{
  int regno_first = -1, regno_last = -1;
  /* Bitmask over [regno_first, regno_last]: bit (i - regno_first) is set
     when hard register I must NOT be marked dead by this set.  */
  unsigned long not_dead = 0;
  int i;

  /* Modifying just one hardware register of a multi-reg value or just a
     byte field of a register does not mean the value from before this insn
     is now dead.  Of course, if it was dead after it's unused now.  */

  /* First, normalize REG and compute the hard-register range
     [regno_first, regno_last] it covers.  */
  switch (GET_CODE (reg))
    {
    case PARALLEL:
      /* Some targets place small structures in registers for return values of
	 functions.  We have to detect this case specially here to get correct
	 flow information.  */
      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
	if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
	  mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
		      flags);
      return;

    case SIGN_EXTRACT:
      /* SIGN_EXTRACT cannot be an lvalue.  */
      gcc_unreachable ();

    case ZERO_EXTRACT:
    case STRICT_LOW_PART:
      /* ??? Assumes STRICT_LOW_PART not used on multi-word registers.  */
      do
	reg = XEXP (reg, 0);
      while (GET_CODE (reg) == SUBREG
	     || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART);
      if (MEM_P (reg))
	break;
      /* A partial store leaves the rest of the register live, so the
	 register must not be considered dead if it was live before.  */
      not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
      /* Fall through.  */

    case REG:
      regno_last = regno_first = REGNO (reg);
      if (regno_first < FIRST_PSEUDO_REGISTER)
	regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
      break;

    case SUBREG:
      if (REG_P (SUBREG_REG (reg)))
	{
	  enum machine_mode outer_mode = GET_MODE (reg);
	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));

	  /* Identify the range of registers affected.  This is moderately
	     tricky for hard registers.  See alter_subreg.  */

	  regno_last = regno_first = REGNO (SUBREG_REG (reg));
	  if (regno_first < FIRST_PSEUDO_REGISTER)
	    {
	      regno_first += subreg_regno_offset (regno_first, inner_mode,
						  SUBREG_BYTE (reg),
						  outer_mode);
	      regno_last = (regno_first
			    + hard_regno_nregs[regno_first][outer_mode] - 1);

	      /* Since we've just adjusted the register number ranges, make
		 sure REG matches.  Otherwise some_was_live will be clear
		 when it shouldn't have been, and we'll create incorrect
		 REG_UNUSED notes.  */
	      reg = gen_rtx_REG (outer_mode, regno_first);
	    }
	  else
	    {
	      /* If the number of words in the subreg is less than the number
		 of words in the full register, we have a well-defined partial
		 set.  Otherwise the high bits are undefined.

		 This is only really applicable to pseudos, since we just took
		 care of multi-word hard registers.  */
	      if (((GET_MODE_SIZE (outer_mode)
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  < ((GET_MODE_SIZE (inner_mode)
		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
		not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
							    regno_first);

	      reg = SUBREG_REG (reg);
	    }
	}
      else
	reg = SUBREG_REG (reg);
      break;

    default:
      break;
    }

  /* If this set is a MEM, then it kills any aliased writes and any
     other MEMs which use it.

     If this set is a REG, then it kills any MEMs which use the reg.  */
  if (optimize && (flags & PROP_SCAN_DEAD_STORES))
    {
      if (REG_P (reg) || MEM_P (reg))
	invalidate_mems_from_set (pbi, reg);

      /* If the memory reference had embedded side effects (autoincrement
	 address modes) then we may need to kill some entries on the
	 memory set list.  */
      if (insn && MEM_P (reg))
	for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);

      if (MEM_P (reg) && ! side_effects_p (reg)
	  /* ??? With more effort we could track conditional memory life.  */
	  && ! cond)
	add_to_mem_set_list (pbi, canon_rtx (reg));
    }

  /* Main case: a REG that is not a permanently-live frame/arg pointer.
     Those pointers are excluded because they are implicitly live for the
     whole function and must never acquire death notes.  */
  if (REG_P (reg)
      && ! (regno_first == FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      && ! (regno_first == HARD_FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
#endif
      )
    {
      int some_was_live = 0, some_was_dead = 0;

      for (i = regno_first; i <= regno_last; ++i)
	{
	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
	  if (pbi->local_set)
	    {
	      /* Order of the set operation matters here since both
		 sets may be the same.  */
	      CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
	      if (cond != NULL_RTX
		  && ! REGNO_REG_SET_P (pbi->local_set, i))
		SET_REGNO_REG_SET (pbi->cond_local_set, i);
	      else
		SET_REGNO_REG_SET (pbi->local_set, i);
	    }
	  if (code != CLOBBER)
	    SET_REGNO_REG_SET (pbi->new_set, i);

	  some_was_live |= needed_regno;
	  some_was_dead |= ! needed_regno;
	}

#ifdef HAVE_conditional_execution
      /* Consider conditional death in deciding that the register needs
	 a death note.  */
      if (some_was_live && ! not_dead
	  /* The stack pointer is never dead.  Well, not strictly true,
	     but it's very difficult to tell from here.  Hopefully
	     combine_stack_adjustments will fix up the most egregious
	     errors.  */
	  && regno_first != STACK_POINTER_REGNUM)
	{
	  for (i = regno_first; i <= regno_last; ++i)
	    if (! mark_regno_cond_dead (pbi, i, cond))
	      not_dead |= ((unsigned long) 1) << (i - regno_first);
	}
#endif

      /* Additional data to record if this is the final pass.  */
      if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
		   | PROP_DEATH_NOTES | PROP_AUTOINC))
	{
	  rtx y;
	  int blocknum = pbi->bb->index;

	  y = NULL_RTX;
	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	    {
	      y = pbi->reg_next_use[regno_first];

	      /* The next use is no longer next, since a store intervenes.  */
	      for (i = regno_first; i <= regno_last; ++i)
		pbi->reg_next_use[i] = 0;
	    }

	  if (flags & PROP_REG_INFO)
	    {
	      for (i = regno_first; i <= regno_last; ++i)
		{
		  /* Count (weighted) references, stores, etc.  This counts a
		     register twice if it is modified, but that is correct.  */
		  REG_N_SETS (i) += 1;
		  REG_N_REFS (i) += 1;
		  REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);

	          /* The insns where a reg is live are normally counted
		     elsewhere, but we want the count to include the insn
		     where the reg is set, and the normal counting mechanism
		     would not count it.  */
		  REG_LIVE_LENGTH (i) += 1;
		}

	      /* If this is a hard reg, record this function uses the reg.  */
	      if (regno_first < FIRST_PSEUDO_REGISTER)
		{
		  for (i = regno_first; i <= regno_last; i++)
		    regs_ever_live[i] = 1;
		  if (flags & PROP_ASM_SCAN)
		    for (i = regno_first; i <= regno_last; i++)
		      regs_asm_clobbered[i] = 1;
		}
	      else
		{
		  /* Keep track of which basic blocks each reg appears in.  */
		  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
		    REG_BASIC_BLOCK (regno_first) = blocknum;
		  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
		    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
		}
	    }

	  if (! some_was_dead)
	    {
	      if (flags & PROP_LOG_LINKS)
		{
		  /* Make a logical link from the next following insn
		     that uses this register, back to this insn.
		     The following insns have already been processed.

		     We don't build a LOG_LINK for hard registers containing
		     in ASM_OPERANDs.  If these registers get replaced,
		     we might wind up changing the semantics of the insn,
		     even if reload can make what appear to be valid
		     assignments later.

		     We don't build a LOG_LINK for global registers to
		     or from a function call.  We don't want to let
		     combine think that it knows what is going on with
		     global registers.  */
		  if (y && (BLOCK_NUM (y) == blocknum)
		      && (regno_first >= FIRST_PSEUDO_REGISTER
			  || (asm_noperands (PATTERN (y)) < 0
			      && ! ((CALL_P (insn)
				     || CALL_P (y))
				    && global_regs[regno_first]))))
		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
		}
	    }
	  else if (not_dead)
	    ;
	  else if (! some_was_live)
	    {
	      if (flags & PROP_REG_INFO)
		REG_N_DEATHS (regno_first) += 1;

	      if (flags & PROP_DEATH_NOTES)
		{
		  /* Note that dead stores have already been deleted
		     when possible.  If we get here, we have found a
		     dead store that cannot be eliminated (because the
		     same insn does something useful).  Indicate this
		     by marking the reg being set as dying here.  */
		  REG_NOTES (insn)
		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
		}
	    }
	  else
	    {
	      if (flags & PROP_DEATH_NOTES)
		{
		  /* This is a case where we have a multi-word hard register
		     and some, but not all, of the words of the register are
		     needed in subsequent insns.  Write REG_UNUSED notes
		     for those parts that were not needed.  This case should
		     be rare.  */

		  for (i = regno_first; i <= regno_last; ++i)
		    if (! REGNO_REG_SET_P (pbi->reg_live, i))
		      REG_NOTES (insn)
			= alloc_EXPR_LIST (REG_UNUSED,
					   regno_reg_rtx[i],
					   REG_NOTES (insn));
		}
	    }
	}

      /* Mark the register as being dead.  */
      if (some_was_live
	  /* The stack pointer is never dead.  Well, not strictly true,
	     but it's very difficult to tell from here.  Hopefully
	     combine_stack_adjustments will fix up the most egregious
	     errors.  */
	  && regno_first != STACK_POINTER_REGNUM)
	{
	  for (i = regno_first; i <= regno_last; ++i)
	    if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
	      {
		if ((pbi->flags & PROP_REG_INFO)
		    && REGNO_REG_SET_P (pbi->reg_live, i))
		  {
		    REG_LIVE_LENGTH (i) += pbi->insn_num - reg_deaths[i];
		    reg_deaths[i] = 0;
		  }
		CLEAR_REGNO_REG_SET (pbi->reg_live, i);
	      }
	  if (flags & PROP_DEAD_INSN)
	    emit_insn_after (gen_rtx_CLOBBER (VOIDmode, reg), insn);
	}
    }
  else if (REG_P (reg))
    {
      /* Excluded frame/arg pointer: still invalidate its next-use entry
	 and record asm clobbers, but do no liveness bookkeeping.  */
      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	pbi->reg_next_use[regno_first] = 0;

      if ((flags & PROP_REG_INFO) != 0
	  && (flags & PROP_ASM_SCAN) != 0
	  &&  regno_first < FIRST_PSEUDO_REGISTER)
	{
	  for (i = regno_first; i <= regno_last; i++)
	    regs_asm_clobbered[i] = 1;
	}
    }

  /* If this is the last pass and this is a SCRATCH, show it will be dying
     here and count it.  */
  else if (GET_CODE (reg) == SCRATCH)
    {
      if (flags & PROP_DEATH_NOTES)
	REG_NOTES (insn)
	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
    }
}

#ifdef HAVE_conditional_execution
/* Mark REGNO conditionally dead under condition COND (an rtx predicate,
   or NULL_RTX for an unconditional store).
   Return true if the register is now unconditionally dead.

   Conditional-death state lives in PBI->reg_cond_dead, a splay tree
   mapping regno -> struct reg_cond_life_info.  */

static int
mark_regno_cond_dead (struct propagate_block_info *pbi, int regno, rtx cond)
{
  /* If this is a store to a predicate register, the value of the
     predicate is changing, we don't know that the predicate as seen
     before is the same as that seen after.  Flush all dependent
     conditions from reg_cond_dead.  This will make all such
     conditionally live registers unconditionally live.  */
  if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
    flush_reg_cond_reg (pbi, regno);

  /* If this is an unconditional store, remove any conditional
     life that may have existed.  */
  if (cond == NULL_RTX)
    splay_tree_remove (pbi->reg_cond_dead, regno);
  else
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;
      rtx ncond;

      /* Otherwise this is a conditional set.  Record that fact.
	 It may have been conditionally used, or there may be a
	 subsequent set with a complementary condition.  */

      node = splay_tree_lookup (pbi->reg_cond_dead, regno);
      if (node == NULL)
	{
	  /* The register was unconditionally live previously.
	     Record the current condition as the condition under
	     which it is dead.  */
	  rcli = xmalloc (sizeof (*rcli));
	  rcli->condition = cond;
	  rcli->stores = cond;
	  rcli->orig_condition = const0_rtx;
	  splay_tree_insert (pbi->reg_cond_dead, regno,
			     (splay_tree_value) rcli);

	  /* Remember that the predicate register controls REGNO's life.  */
	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	  /* Not unconditionally dead.  */
	  return 0;
	}
      else
	{
	  /* The register was conditionally live previously.
	     Add the new condition to the old.  */
	  rcli = (struct reg_cond_life_info *) node->value;
	  ncond = rcli->condition;
	  ncond = ior_reg_cond (ncond, cond, 1);
	  if (rcli->stores == const0_rtx)
	    rcli->stores = cond;
	  else if (rcli->stores != const1_rtx)
	    rcli->stores = ior_reg_cond (rcli->stores, cond, 1);

	  /* If the register is now unconditionally dead, remove the entry
	     in the splay_tree.  A register is unconditionally dead if the
	     dead condition ncond is true.  A register is also unconditionally
	     dead if the sum of all conditional stores is an unconditional
	     store (stores is true), and the dead condition is identically the
	     same as the original dead condition initialized at the end of
	     the block.  This is a pointer compare, not an rtx_equal_p
	     compare.  */
	  if (ncond == const1_rtx
	      || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
	    splay_tree_remove (pbi->reg_cond_dead, regno);
	  else
	    {
	      rcli->condition = ncond;

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	      /* Not unconditionally dead.  */
	      return 0;
	    }
	}
    }

  return 1;
}
3079

3080
/* Called from splay_tree_delete for pbi->reg_cond_life.  */
3081

3082
static void
3083
free_reg_cond_life_info (splay_tree_value value)
3084 3085 3086 3087
{
  struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
  free (rcli);
}
3088

3089
/* Helper function for flush_reg_cond_reg.  */
3090

3091
static int
3092
flush_reg_cond_reg_1 (splay_tree_node node, void *data)
3093 3094 3095 3096
{
  struct reg_cond_life_info *rcli;
  int *xdata = (int *) data;
  unsigned int regno = xdata[0];
3097

3098 3099 3100 3101
  /* Don't need to search if last flushed value was farther on in
     the in-order traversal.  */
  if (xdata[1] >= (int) node->key)
    return 0;
3102

3103 3104 3105 3106 3107
  /* Splice out portions of the expression that refer to regno.  */
  rcli = (struct reg_cond_life_info *) node->value;
  rcli->condition = elim_reg_cond (rcli->condition, regno);
  if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
    rcli->stores = elim_reg_cond (rcli->stores, regno);
3108

3109 3110 3111 3112 3113
  /* If the entire condition is now false, signal the node to be removed.  */
  if (rcli->condition == const0_rtx)
    {
      xdata[1] = node->key;
      return -1;
3114
    }
3115 3116
  else
    gcc_assert (rcli->condition != const1_rtx);
3117

3118
  return 0;
3119
}
3120

3121
/* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE.  */
3122

3123
static void
3124
flush_reg_cond_reg (struct propagate_block_info *pbi, int regno)
3125 3126
{
  int pair[2];
3127

3128 3129 3130 3131 3132
  pair[0] = regno;
  pair[1] = -1;
  while (splay_tree_foreach (pbi->reg_cond_dead,
			     flush_reg_cond_reg_1, pair) == -1)
    splay_tree_remove (pbi->reg_cond_dead, pair[1]);
3133

3134 3135
  CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
}
3136

3137 3138 3139 3140
/* Logical arithmetic on predicate conditions.  IOR, NOT and AND.
   For ior/and, the ADD flag determines whether we want to add the new
   condition X to the old one unconditionally.  If it is zero, we will
   only return a new expression if X allows us to simplify part of
   OLD, otherwise we return NULL to the caller.
   If ADD is nonzero, we will return a new condition in all cases.  The
   toplevel caller of one of these functions should always pass 1 for
   ADD.

   Compute OLD | X.  A NULL return (only possible when ADD is zero)
   means "no simplification found; keep OLD as-is".  */

static rtx
ior_reg_cond (rtx old, rtx x, int add)
{
  rtx op0, op1;

  if (COMPARISON_P (old))
    {
      /* (cond) | (reverse-cond on same reg) is always true.  */
      if (COMPARISON_P (x)
	  && REVERSE_CONDEXEC_PREDICATES_P (x, old)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return const1_rtx;
      /* (cond) | (same cond) is just the condition.  */
      if (GET_CODE (x) == GET_CODE (old)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return old;
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);
    }

  switch (GET_CODE (old))
    {
    case IOR:
      /* Try to fold X into either arm; NULL means that arm was left
	 unchanged.  */
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const0_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const0_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const1_rtx || op1 == const1_rtx)
	    return const1_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x | A) | x ~ (x | A).  */
	    return old;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A | x) | x ~ (A | x).  */
	    return old;
	  return gen_rtx_IOR (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case AND:
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const1_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const1_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const0_rtx || op1 == const0_rtx)
	    return const0_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x & A) | x ~ x.  */
	    return op0;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A & x) | x ~ x.  */
	    return op1;
	  return gen_rtx_AND (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case NOT:
      /* ~A | x ~ ~(A & ~x), computed via De Morgan.  */
      op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
      if (op0 != NULL)
	return not_reg_cond (op0);
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    default:
      gcc_unreachable ();
    }
}

3234
static rtx
3235
not_reg_cond (rtx x)
3236
{
3237 3238 3239 3240
  if (x == const0_rtx)
    return const1_rtx;
  else if (x == const1_rtx)
    return const0_rtx;
3241
  if (GET_CODE (x) == NOT)
3242
    return XEXP (x, 0);
3243
  if (COMPARISON_P (x)
3244
      && REG_P (XEXP (x, 0)))
3245
    {
3246
      gcc_assert (XEXP (x, 1) == const0_rtx);
3247

3248
      return gen_rtx_fmt_ee (reversed_comparison_code (x, NULL),
3249
			     VOIDmode, XEXP (x, 0), const0_rtx);
3250
    }
3251
  return gen_rtx_NOT (0, x);
3252 3253
}

3254
/* Compute OLD & X for predicate conditions (counterpart of
   ior_reg_cond above; see its comment for the ADD protocol).
   A NULL return (only possible when ADD is zero) means "no
   simplification found; keep OLD as-is".  */

static rtx
and_reg_cond (rtx old, rtx x, int add)
{
  rtx op0, op1;

  if (COMPARISON_P (old))
    {
      /* (cond) & (reversed cond on same reg) is always false.  */
      if (COMPARISON_P (x)
	  && GET_CODE (x) == reversed_comparison_code (old, NULL)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return const0_rtx;
      /* (cond) & (same cond) is just the condition.  */
      if (GET_CODE (x) == GET_CODE (old)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return old;
      if (! add)
	return NULL;
      return gen_rtx_AND (0, old, x);
    }

  switch (GET_CODE (old))
    {
    case IOR:
      /* Try to fold X into either arm; NULL means that arm was left
	 unchanged.  */
      op0 = and_reg_cond (XEXP (old, 0), x, 0);
      op1 = and_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const0_rtx)
	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
	  if (op1 == const0_rtx)
	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
	  if (op0 == const1_rtx || op1 == const1_rtx)
	    return const1_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x | A) & x ~ x.  */
	    return op0;
	  if (op1 == NULL)
	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A | x) & x ~ x.  */
	    return op1;
	  return gen_rtx_IOR (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_AND (0, old, x);

    case AND:
      op0 = and_reg_cond (XEXP (old, 0), x, 0);
      op1 = and_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const1_rtx)
	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
	  if (op1 == const1_rtx)
	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
	  if (op0 == const0_rtx || op1 == const0_rtx)
	    return const0_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x & A) & x ~ (x & A).  */
	    return old;
	  if (op1 == NULL)
	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A & x) & x ~ (A & x).  */
	    return old;
	  return gen_rtx_AND (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_AND (0, old, x);

    case NOT:
      /* ~A & x ~ ~(A | ~x), computed via De Morgan.  */
      op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
      if (op0 != NULL)
	return not_reg_cond (op0);
      if (! add)
	return NULL;
      return gen_rtx_AND (0, old, x);

    default:
      gcc_unreachable ();
    }
}

3342 3343 3344 3345
/* Given a condition X, remove references to reg REGNO and return the
   new condition.  The removal will be done so that all conditions
   involving REGNO are considered to evaluate to false.  This function
   is used when the value of REGNO changes.  */
Kazu Hirata committed
3346

3347
static rtx
3348
elim_reg_cond (rtx x, unsigned int regno)
3349
{
3350 3351
  rtx op0, op1;

3352
  if (COMPARISON_P (x))
3353
    {
3354 3355 3356
      if (REGNO (XEXP (x, 0)) == regno)
	return const0_rtx;
      return x;
3357
    }
Kazu Hirata committed
3358

3359 3360 3361 3362 3363 3364 3365 3366 3367 3368 3369 3370 3371 3372
  switch (GET_CODE (x))
    {
    case AND:
      op0 = elim_reg_cond (XEXP (x, 0), regno);
      op1 = elim_reg_cond (XEXP (x, 1), regno);
      if (op0 == const0_rtx || op1 == const0_rtx)
	return const0_rtx;
      if (op0 == const1_rtx)
	return op1;
      if (op1 == const1_rtx)
	return op0;
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return gen_rtx_AND (0, op0, op1);
3373

3374 3375 3376 3377 3378 3379 3380 3381 3382 3383 3384 3385
    case IOR:
      op0 = elim_reg_cond (XEXP (x, 0), regno);
      op1 = elim_reg_cond (XEXP (x, 1), regno);
      if (op0 == const1_rtx || op1 == const1_rtx)
	return const1_rtx;
      if (op0 == const0_rtx)
	return op1;
      if (op1 == const0_rtx)
	return op0;
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return gen_rtx_IOR (0, op0, op1);
3386

3387 3388 3389 3390 3391 3392 3393 3394 3395
    case NOT:
      op0 = elim_reg_cond (XEXP (x, 0), regno);
      if (op0 == const0_rtx)
	return const1_rtx;
      if (op0 == const1_rtx)
	return const0_rtx;
      if (op0 != XEXP (x, 0))
	return not_reg_cond (op0);
      return x;
3396

3397
    default:
3398
      gcc_unreachable ();
3399
    }
3400
}
3401 3402 3403
#endif /* HAVE_conditional_execution */

#ifdef AUTO_INC_DEC
3404

3405 3406 3407 3408 3409
/* Try to substitute the auto-inc expression INC as the address inside
   MEM which occurs in INSN.  Currently, the address of MEM is an expression
   involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
   that has a single set whose source is a PLUS of INCR_REG and something
   else.

   On success, INSN's address is rewritten to INC, INCR is turned into a
   plain copy (and deleted if it becomes a self-copy), and PBI's
   liveness/next-use state plus register statistics are updated.  */

static void
attempt_auto_inc (struct propagate_block_info *pbi, rtx inc, rtx insn,
		  rtx mem, rtx incr, rtx incr_reg)
{
  int regno = REGNO (incr_reg);
  rtx set = single_set (incr);
  rtx q = SET_DEST (set);
  rtx y = SET_SRC (set);
  /* Index of INCR_REG within the PLUS source of INCR.  */
  int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
  int changed;

  /* Make sure this reg appears only once in this insn.  */
  if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
    return;

  if (dead_or_set_p (incr, incr_reg)
      /* Mustn't autoinc an eliminable register.  */
      && (regno >= FIRST_PSEUDO_REGISTER
	  || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
    {
      /* This is the simple case.  Try to make the auto-inc.  If
	 we can't, we are done.  Otherwise, we will do any
	 needed updates below.  */
      if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
	return;
    }
  else if (REG_P (q)
	   /* PREV_INSN used here to check the semi-open interval
	      [insn,incr).  */
	   && ! reg_used_between_p (q,  PREV_INSN (insn), incr)
	   /* We must also check for sets of q as q may be
	      a call clobbered hard register and there may
	      be a call between PREV_INSN (insn) and incr.  */
	   && ! reg_set_between_p (q,  PREV_INSN (insn), incr))
    {
      /* We have *p followed sometime later by q = p+size.
	 Both p and q must be live afterward,
	 and q is not used between INSN and its assignment.
	 Change it to q = p, ...*q..., q = q+size.
	 Then fall into the usual case.  */
      rtx insns, temp;

      /* Build the q = p copy in a fresh sequence.  */
      start_sequence ();
      emit_move_insn (q, incr_reg);
      insns = get_insns ();
      end_sequence ();

      /* If we can't make the auto-inc, or can't make the
	 replacement into Y, exit.  There's no point in making
	 the change below if we can't do the auto-inc and doing
	 so is not correct in the pre-inc case.  */

      /* Both replacements are grouped: either both succeed or neither
	 is applied.  */
      XEXP (inc, 0) = q;
      validate_change (insn, &XEXP (mem, 0), inc, 1);
      validate_change (incr, &XEXP (y, opnum), q, 1);
      if (! apply_change_group ())
	return;

      /* We now know we'll be doing this change, so emit the
	 new insn(s) and do the updates.  */
      emit_insn_before (insns, insn);

      /* Keep the basic block's head pointer in sync with the newly
	 emitted copy.  */
      if (BB_HEAD (pbi->bb) == insn)
	BB_HEAD (pbi->bb) = insns;

      /* INCR will become a NOTE and INSN won't contain a
	 use of INCR_REG.  If a use of INCR_REG was just placed in
	 the insn before INSN, make that the next use.
	 Otherwise, invalidate it.  */
      if (NONJUMP_INSN_P (PREV_INSN (insn))
	  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
	  && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
	pbi->reg_next_use[regno] = PREV_INSN (insn);
      else
	pbi->reg_next_use[regno] = 0;

      /* From here on, operate on Q instead of the original register.  */
      incr_reg = q;
      regno = REGNO (q);

      if ((pbi->flags & PROP_REG_INFO)
	  && !REGNO_REG_SET_P (pbi->reg_live, regno))
	reg_deaths[regno] = pbi->insn_num;

      /* REGNO is now used in INCR which is below INSN, but
	 it previously wasn't live here.  If we don't mark
	 it as live, we'll put a REG_DEAD note for it
	 on this insn, which is incorrect.  */
      SET_REGNO_REG_SET (pbi->reg_live, regno);

      /* If there are any calls between INSN and INCR, show
	 that REGNO now crosses them.  */
      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
	if (CALL_P (temp))
	  REG_N_CALLS_CROSSED (regno)++;

      /* Invalidate alias info for Q since we just changed its value.  */
      clear_reg_alias_info (q);
    }
  else
    return;

  /* If we haven't returned, it means we were able to make the
     auto-inc, so update the status.  First, record that this insn
     has an implicit side effect.  */

  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));

  /* Modify the old increment-insn to simply copy
     the already-incremented value of our register.  */
  changed = validate_change (incr, &SET_SRC (set), incr_reg, 0);
  gcc_assert (changed);

  /* If that makes it a no-op (copying the register into itself) delete
     it so it won't appear to be a "use" and a "set" of this
     register.  */
  if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
    {
      /* If the original source was dead, it's dead now.  */
      rtx note;

      while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
	{
	  remove_note (incr, note);
	  if (XEXP (note, 0) != incr_reg)
	    {
	      unsigned int regno = REGNO (XEXP (note, 0));

	      /* Close out the live range for statistics before
		 clearing liveness.  */
	      if ((pbi->flags & PROP_REG_INFO)
		  && REGNO_REG_SET_P (pbi->reg_live, regno))
		{
		  REG_LIVE_LENGTH (regno) += pbi->insn_num - reg_deaths[regno];
		  reg_deaths[regno] = 0;
		}
	      CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
	    }
	}

      SET_INSN_DELETED (incr);
    }

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      /* Count an extra reference to the reg.  When a reg is
	 incremented, spilling it is worse, so we want to make
	 that less likely.  */
      REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);

      /* Count the increment as a setting of the register,
	 even though it isn't a SET in rtl.  */
      REG_N_SETS (regno)++;
    }
}
3563 3564 3565

/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   reference.  PBI carries the per-block propagation state; in particular
   PBI->reg_next_use tells us the next insn that uses each register, which is
   where the candidate increment must live.  */

static void
find_auto_inc (struct propagate_block_info *pbi, rtx x, rtx insn)
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set, y, incr, inc_val;
  int regno;
  int size = GET_MODE_SIZE (GET_MODE (x));

  /* Never add an auto-inc side effect to a jump insn.  */
  if (JUMP_P (insn))
    return;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  /* Strip a constant displacement: (plus REG (const_int N)) leaves REG in
     ADDR and N in OFFSET.  */
  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (!REG_P (addr))
    return;

  regno = REGNO (addr);

  /* Is the next use an increment that might make auto-increment?  The
     increment must be in the same basic block and must be a single SET.  */
  incr = pbi->reg_next_use[regno];
  if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
    return;
  set = single_set (incr);
  if (set == 0 || GET_CODE (set) != SET)
    return;
  y = SET_SRC (set);

  if (GET_CODE (y) != PLUS)
    return;

  /* The addend may appear on either side of the PLUS.  */
  if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
    inc_val = XEXP (y, 1);
  else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
    inc_val = XEXP (y, 0);
  else
    return;

  if (GET_CODE (inc_val) == CONST_INT)
    {
      /* Match the constant addend and the MEM's displacement against each
	 addressing form the target supports.  Post forms need OFFSET == 0;
	 pre forms need OFFSET equal to the (signed) increment.  */
      if (HAVE_POST_INCREMENT
	  && (INTVAL (inc_val) == size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_INCREMENT
	       && (INTVAL (inc_val) == size && offset == size))
	attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == -size))
	attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_MODIFY_DISP && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
      else if (HAVE_PRE_MODIFY_DISP && offset == INTVAL (inc_val))
	attempt_auto_inc (pbi, gen_rtx_PRE_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
  else if (REG_P (inc_val)
	   && ! reg_set_between_p (inc_val, PREV_INSN (insn),
				   NEXT_INSN (incr)))

    {
      /* Register addend: only POST_MODIFY_REG can express this, and the
	 addend register must not change between the use and the increment.  */
      if (HAVE_POST_MODIFY_REG && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
}
Kazu Hirata committed
3653

3654 3655
#endif /* AUTO_INC_DEC */

3656
/* Record that register REG (all of its constituent hard regnos, for a
   multi-word hard register) is used by INSN.  Depending on PBI->flags this
   updates next-use information, regs_ever_live / REG_FREQ / REG_N_REFS
   bookkeeping, creates REG_DEAD notes, and finally marks the register live
   in PBI->reg_live.  COND, if non-null, is the COND_EXEC condition under
   which the use happens (only meaningful with HAVE_conditional_execution).  */

static void
mark_used_reg (struct propagate_block_info *pbi, rtx reg,
	       rtx cond ATTRIBUTE_UNUSED, rtx insn)
{
  unsigned int regno_first, regno_last, i;
  int some_was_live, some_was_dead, some_not_set;

  /* A hard register in a wide mode occupies several consecutive regnos.  */
  regno_last = regno_first = REGNO (reg);
  if (regno_first < FIRST_PSEUDO_REGISTER)
    regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;

  /* Find out if any of this register is live after this instruction.  */
  some_was_live = some_was_dead = 0;
  for (i = regno_first; i <= regno_last; ++i)
    {
      int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
      some_was_live |= needed_regno;
      some_was_dead |= ! needed_regno;
    }

  /* Find out if any of the register was set this insn.  */
  some_not_set = 0;
  for (i = regno_first; i <= regno_last; ++i)
    some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);

  if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    {
      /* Record where each reg is used, so when the reg is set we know
	 the next insn that uses it.  */
      pbi->reg_next_use[regno_first] = insn;
    }

  if (pbi->flags & PROP_REG_INFO)
    {
      if (regno_first < FIRST_PSEUDO_REGISTER)
	{
	  /* If this is a register we are going to try to eliminate,
	     don't mark it live here.  If we are successful in
	     eliminating it, it need not be live unless it is used for
	     pseudos, in which case it will have been set live when it
	     was allocated to the pseudos.  If the register will not
	     be eliminated, reload will set it live at that point.

	     Otherwise, record that this function uses this register.  */
	  /* ??? The PPC backend tries to "eliminate" on the pic
	     register to itself.  This should be fixed.  In the mean
	     time, hack around it.  */

	  if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
	         && (regno_first == FRAME_POINTER_REGNUM
		     || regno_first == ARG_POINTER_REGNUM)))
	    for (i = regno_first; i <= regno_last; ++i)
	      regs_ever_live[i] = 1;
	}
      else
	{
	  /* Keep track of which basic block each reg appears in.  */

	  int blocknum = pbi->bb->index;
	  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
	    REG_BASIC_BLOCK (regno_first) = blocknum;
	  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
	    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;

	  /* Count (weighted) number of uses of each reg.  */
	  REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
	  REG_N_REFS (regno_first)++;
	}
      /* Record the insn number at which each part of the register becomes
	 live again, for REG_LIVE_LENGTH accounting elsewhere.  */
      for (i = regno_first; i <= regno_last; ++i)
	if (! REGNO_REG_SET_P (pbi->reg_live, i))
	  {
	    gcc_assert (!reg_deaths[i]);
	    reg_deaths[i] = pbi->insn_num;
	  }
    }

  /* Record and count the insns in which a reg dies.  If it is used in
     this insn and was dead below the insn then it dies in this insn.
     If it was set in this insn, we do not make a REG_DEAD note;
     likewise if we already made such a note.  */
  if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
      && some_was_dead
      && some_not_set)
    {
      /* Check for the case where the register dying partially
	 overlaps the register set by this insn.  */
      if (regno_first != regno_last)
	for (i = regno_first; i <= regno_last; ++i)
	  some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);

      /* If none of the words in X is needed, make a REG_DEAD note.
	 Otherwise, we must make partial REG_DEAD notes.  */
      if (! some_was_live)
	{
	  if ((pbi->flags & PROP_DEATH_NOTES)
	      && ! find_regno_note (insn, REG_DEAD, regno_first))
	    REG_NOTES (insn)
	      = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));

	  if (pbi->flags & PROP_REG_INFO)
	    REG_N_DEATHS (regno_first)++;
	}
      else
	{
	  /* Don't make a REG_DEAD note for a part of a register
	     that is set in the insn.  */
	  for (i = regno_first; i <= regno_last; ++i)
	    if (! REGNO_REG_SET_P (pbi->reg_live, i)
		&& ! dead_or_set_regno_p (insn, i))
	      REG_NOTES (insn)
		= alloc_EXPR_LIST (REG_DEAD,
				   regno_reg_rtx[i],
				   REG_NOTES (insn));
	}
    }

  /* Mark the register as being live.  */
  for (i = regno_first; i <= regno_last; ++i)
    {
#ifdef HAVE_conditional_execution
      /* Capture liveness before the SET below so the conditional-execution
	 code can tell whether this use resurrects the register.  */
      int this_was_live = REGNO_REG_SET_P (pbi->reg_live, i);
#endif

      SET_REGNO_REG_SET (pbi->reg_live, i);

#ifdef HAVE_conditional_execution
      /* If this is a conditional use, record that fact.  If it is later
	 conditionally set, we'll know to kill the register.  */
      if (cond != NULL_RTX)
	{
	  splay_tree_node node;
	  struct reg_cond_life_info *rcli;
	  rtx ncond;

	  if (this_was_live)
	    {
	      node = splay_tree_lookup (pbi->reg_cond_dead, i);
	      if (node == NULL)
		{
		  /* The register was unconditionally live previously.
		     No need to do anything.  */
		}
	      else
		{
		  /* The register was conditionally live previously.
		     Subtract the new life cond from the old death cond.  */
		  rcli = (struct reg_cond_life_info *) node->value;
		  ncond = rcli->condition;
		  ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);

		  /* If the register is now unconditionally live,
		     remove the entry in the splay_tree.  */
		  if (ncond == const0_rtx)
		    splay_tree_remove (pbi->reg_cond_dead, i);
		  else
		    {
		      rcli->condition = ncond;
		      SET_REGNO_REG_SET (pbi->reg_cond_reg,
					 REGNO (XEXP (cond, 0)));
		    }
		}
	    }
	  else
	    {
	      /* The register was not previously live at all.  Record
		 the condition under which it is still dead.  */
	      rcli = xmalloc (sizeof (*rcli));
	      rcli->condition = not_reg_cond (cond);
	      rcli->stores = const0_rtx;
	      rcli->orig_condition = const0_rtx;
	      splay_tree_insert (pbi->reg_cond_dead, i,
				 (splay_tree_value) rcli);

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
	    }
	}
      else if (this_was_live)
	{
	  /* The register may have been conditionally live previously, but
	     is now unconditionally live.  Remove it from the conditionally
	     dead list, so that a conditional set won't cause us to think
	     it dead.  */
	  splay_tree_remove (pbi->reg_cond_dead, i);
	}
#endif
    }
}

3844 3845 3846
/* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
   This is done assuming the registers needed from X are those that
   have 1-bits in PBI->REG_LIVE.

   INSN is the containing instruction.  If INSN is dead, this function
   is not called.  COND is the COND_EXEC condition in effect, if any.  */

static void
mark_used_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
{
  RTX_CODE code;
  int regno;
  int flags = pbi->flags;

 retry:
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Constants, labels, and address vectors mention no registers.  */
      return;

#ifdef HAVE_cc0
    case CC0:
      pbi->cc0_live = 1;
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
      return;

    case MEM:
      /* Don't bother watching stores to mems if this is not the
	 final pass.  We'll not be deleting dead stores this round.  */
      if (optimize && (flags & PROP_SCAN_DEAD_STORES))
	{
	  /* Invalidate the data for the last MEM stored, but only if MEM is
	     something that can be stored into.  */
	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	    /* Needn't clear the memory set list.  */
	    ;
	  else
	    {
	      /* Splice out of mem_set_list every recorded store this
		 read may depend on.  */
	      rtx temp = pbi->mem_set_list;
	      rtx prev = NULL_RTX;
	      rtx next;

	      while (temp)
		{
		  next = XEXP (temp, 1);
		  if (anti_dependence (XEXP (temp, 0), x))
		    {
		      /* Splice temp out of the list.  */
		      if (prev)
			XEXP (prev, 1) = next;
		      else
			pbi->mem_set_list = next;
		      free_EXPR_LIST_node (temp);
		      pbi->mem_set_list_len--;
		    }
		  else
		    prev = temp;
		  temp = next;
		}
	    }

	  /* If the memory reference had embedded side effects (autoincrement
	     address modes.  Then we may need to kill some entries on the
	     memory set list.  */
	  if (insn)
	    for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
	}

#ifdef AUTO_INC_DEC
      if (flags & PROP_AUTOINC)
	find_auto_inc (pbi, x, insn);
#endif
      break;

    case SUBREG:
#ifdef CANNOT_CHANGE_MODE_CLASS
      if (flags & PROP_REG_INFO)
	record_subregs_of_mode (x);
#endif

      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);
      if (!REG_P (x))
	goto retry;
      /* Fall through.  */

    case REG:
      /* See a register other than being set => mark it as needed.  */
      mark_used_reg (pbi, x, cond, insn);
      return;

    case SET:
      {
	rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (MEM_P (testreg))
	  {
#ifdef AUTO_INC_DEC
	    if (flags & PROP_AUTOINC)
	      find_auto_inc (pbi, testreg, insn);
#endif
	    mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG.
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
#ifdef CANNOT_CHANGE_MODE_CLASS
	    if ((flags & PROP_REG_INFO) && GET_CODE (testreg) == SUBREG)
	      record_subregs_of_mode (testreg);
#endif

	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !((REG_BYTES (SUBREG_REG (testreg))
		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD
		     > (REG_BYTES (testreg)
			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register or group of registers,
	   recursively scan the value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || (REG_P (testreg)
		&& (regno = REGNO (testreg),
		    ! (regno == FRAME_POINTER_REGNUM
		       && (! reload_completed || frame_pointer_needed)))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		&& ! (regno == HARD_FRAME_POINTER_REGNUM
		      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
		))
	  {
	    if (mark_dest)
	      mark_used_regs (pbi, SET_DEST (x), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }
      }
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be considered to use
	   and clobber all hard registers, all pseudo-registers and all of
	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu rounding
	   mode.  An insn should not be moved across this even if it only uses
	   pseudo-regs because it might give an incorrectly rounded result.

	   ?!? Unfortunately, marking all hard registers as live causes massive
	   problems for the register allocator and marking all pseudos as live
	   creates mountains of uninitialized variable warnings.

	   So for now, just clear the memory set list and mark any regs
	   we can find in ASM_OPERANDS as used.  */
	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
	  {
	    free_EXPR_LIST_list (&pbi->mem_set_list);
	    pbi->mem_set_list_len = 0;
	  }

	/* For all ASM_OPERANDS, we must traverse the vector of input operands.
	   We can not just fall through here since then we would be confused
	   by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	   traditional asms unlike their normal usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
	  }
	break;
      }

    case COND_EXEC:
      /* Nested COND_EXECs are not expected here.  */
      gcc_assert (!cond);

      /* The test itself is an unconditional use.  */
      mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);

      /* Re-scan the body under the COND_EXEC's condition.  */
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char * const fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    mark_used_regs (pbi, XEXP (x, i), cond, insn);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
	  }
      }
  }
}
4108 4109

#ifdef AUTO_INC_DEC
4110

4111
/* INSN is a single_set insn that adds a constant to a register
   (the source is a PLUS, or presumably a MINUS when not PLUS — the sign
   of AMOUNT is flipped for any non-PLUS code).  Try to fold the addition
   into the next use of the register as a pre-increment/pre-decrement
   address, deleting INSN on success.  Returns 1 if INSN was absorbed,
   0 otherwise.  */

static int
try_pre_increment_1 (struct propagate_block_info *pbi, rtx insn)
{
  /* Find the next use of this reg.  If in same basic block,
     make it do pre-increment or pre-decrement if appropriate.  */
  rtx x = single_set (insn);
  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
			  * INTVAL (XEXP (SET_SRC (x), 1)));
  int regno = REGNO (SET_DEST (x));
  rtx y = pbi->reg_next_use[regno];
  if (y != 0
      && SET_DEST (x) != stack_pointer_rtx
      && BLOCK_NUM (y) == BLOCK_NUM (insn)
      /* Don't do this if the reg dies, or gets set in y; a standard addressing
	 mode would be better.  */
      && ! dead_or_set_p (y, SET_DEST (x))
      && try_pre_increment (y, SET_DEST (x), amount))
    {
      /* We have found a suitable auto-increment and already changed
	 insn Y to do it.  So flush this increment instruction.  */
      propagate_block_delete_insn (insn);

      /* Count a reference to this reg for the increment insn we are
	 deleting.  When a reg is incremented, spilling it is worse,
	 so we want to make that less likely.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
	  REG_N_SETS (regno)++;
	}

      /* Flush any remembered memories depending on the value of
	 the incremented register.  */
      invalidate_mems_from_set (pbi, SET_DEST (x));

      return 1;
    }
  return 0;
}
4150

4151 4152 4153 4154 4155
/* Try to change INSN so that it does pre-increment or pre-decrement
   addressing on register REG in order to add AMOUNT to REG.
   AMOUNT is negative for pre-decrement.
   Returns 1 if the change could be made.
   This checks all about the validity of the result of modifying INSN.  */

static int
try_pre_increment (rtx insn, rtx reg, HOST_WIDE_INT amount)
{
  rtx use;

  /* Nonzero if we can try to make a pre-increment or pre-decrement.
     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
  int pre_ok = 0;
  /* Nonzero if we can try to make a post-increment or post-decrement.
     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
  int post_ok = 0;

  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
  int do_post = 0;

  /* From the sign of increment, see which possibilities are conceivable
     on this target machine.  */
  if (HAVE_PRE_INCREMENT && amount > 0)
    pre_ok = 1;
  if (HAVE_POST_INCREMENT && amount > 0)
    post_ok = 1;

  if (HAVE_PRE_DECREMENT && amount < 0)
    pre_ok = 1;
  if (HAVE_POST_DECREMENT && amount < 0)
    post_ok = 1;

  if (! (pre_ok || post_ok))
    return 0;

  /* It is not safe to add a side effect to a jump insn
     because if the incremented register is spilled and must be reloaded
     there would be no way to store the incremented value back in memory.  */

  if (JUMP_P (insn))
    return 0;

  use = 0;
  if (pre_ok)
    use = find_use_as_address (PATTERN (insn), reg, 0);
  if (post_ok && (use == 0 || use == (rtx) (size_t) 1))
    {
      /* For post modes the use must address REG - AMOUNT, since the
	 increment takes effect after the access.  */
      use = find_use_as_address (PATTERN (insn), reg, -amount);
      do_post = 1;
    }

  /* find_use_as_address returns (rtx) 1 when REG is used outside a
     suitable address or more than once; give up in that case too.  */
  if (use == 0 || use == (rtx) (size_t) 1)
    return 0;

  /* The access size must match the increment amount exactly.  */
  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
    return 0;

  /* See if this combination of instruction and addressing mode exists.  */
  if (! validate_change (insn, &XEXP (use, 0),
			 gen_rtx_fmt_e (amount > 0
					? (do_post ? POST_INC : PRE_INC)
					: (do_post ? POST_DEC : PRE_DEC),
					Pmode, reg), 0))
    return 0;

  /* Record that this insn now has an implicit side effect on X.  */
  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
  return 1;
}

4224 4225 4226 4227 4228 4229
#endif /* AUTO_INC_DEC */

/* Find the place in the rtx X where REG is used as a memory address.
   Return the MEM rtx that so uses it.
   If PLUSCONST is nonzero, search instead for a memory address equivalent to
   (plus REG (const_int PLUSCONST)).

   If such an address does not appear, return 0.
   If REG appears more than once, or is used other than in such an address,
   return (rtx) 1.  */

rtx
find_use_as_address (rtx x, rtx reg, HOST_WIDE_INT plusconst)
{
  enum rtx_code code = GET_CODE (x);
  const char * const fmt = GET_RTX_FORMAT (code);
  int i;
  rtx value = 0;
  rtx tem;

  /* Direct hit: (mem REG), when no displacement is wanted.  */
  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
    return x;

  /* (mem (plus REG (const_int PLUSCONST))).  */
  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == reg
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
    return x;

  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
    {
      /* If REG occurs inside a MEM used in a bit-field reference,
	 that is unacceptable.  */
      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
	return (rtx) (size_t) 1;
    }

  /* A bare occurrence of REG outside an address disqualifies it.  */
  if (x == reg)
    return (rtx) (size_t) 1;

  /* Recurse over the operands, merging results; a second occurrence of
     REG anywhere makes the whole search fail with (rtx) 1.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
	  if (value == 0)
	    value = tem;
	  else if (tem != 0)
	    return (rtx) (size_t) 1;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
	      if (value == 0)
		value = tem;
	      else if (tem != 0)
		return (rtx) (size_t) 1;
	    }
	}
    }

  return value;
}

/* Write information about registers and basic blocks into FILE.
   This is part of making a debugging dump.  */
Kazu Hirata committed
4293

4294
void
4295
dump_regset (regset r, FILE *outf)
4296
{
4297
  unsigned i;
4298 4299
  reg_set_iterator rsi;

4300
  if (r == NULL)
4301
    {
4302
      fputs (" (nil)", outf);
4303 4304
      return;
    }
4305

4306
  EXECUTE_IF_SET_IN_REG_SET (r, 0, i, rsi)
4307
    {
4308 4309 4310 4311
      fprintf (outf, " %d", i);
      if (i < FIRST_PSEUDO_REGISTER)
	fprintf (outf, " [%s]",
		 reg_names[i]);
4312
    }
4313 4314
}

4315
/* Print a human-readable representation of R on the standard error
4316 4317
   stream.  This function is designed to be used from within the
   debugger.  */
Kazu Hirata committed
4318

4319
void
4320
debug_regset (regset r)
4321
{
4322 4323
  dump_regset (r, stderr);
  putc ('\n', stderr);
4324 4325
}

4326 4327
/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register allocation.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */

void
recompute_reg_usage (void)
{
  /* Reset the per-register life data before the local update below
     repopulates it.  */
  allocate_reg_life_data ();
  /* distribute_notes in combiner fails to convert some of the
     REG_UNUSED notes to REG_DEAD notes.  This causes CHECK_DEAD_NOTES
     in sched1 to die.  To solve this update the DEATH_NOTES
     here.  */
  update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO | PROP_DEATH_NOTES);
}

4350 4351 4352
/* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
   blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
   of the number of registers that died.  KILL nonzero requests removal;
   zero merely counts (see count_or_remove_death_notes_bb).  */

int
count_or_remove_death_notes (sbitmap blocks, int kill)
{
  int count = 0;
  int i;
  basic_block bb;

  /* This used to be a loop over all the blocks with a membership test
     inside the loop.  That can be amazingly expensive on a large CFG
     when only a small number of bits are set in BLOCKs (for example,
     the calls from the scheduler typically have very few bits set).

     For extra credit, someone should convert BLOCKS to a bitmap rather
     than an sbitmap.  */
  if (blocks)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
	{
	  count += count_or_remove_death_notes_bb (BASIC_BLOCK (i), kill);
	});
    }
  else
    {
      FOR_EACH_BB (bb)
	{
	  count += count_or_remove_death_notes_bb (bb, kill);
	}
    }

  return count;
}
  
/* Optionally removes all the REG_DEAD and REG_UNUSED notes from basic
   block BB.  Returns a count of the number of registers that died.
   If KILL is nonzero the notes are deleted; otherwise they are only
   counted.  */

static int
count_or_remove_death_notes_bb (basic_block bb, int kill)
{
  int count = 0;
  rtx insn;

  /* Walk every insn in BB, including BB_END itself (hence the
     break-at-bottom loop shape).  */
  for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* PPREV always points at the link field that holds LINK, so a
	     note can be spliced out in place.  */
	  rtx *pprev = &REG_NOTES (insn);
	  rtx link = *pprev;

	  while (link)
	    {
	      switch (REG_NOTE_KIND (link))
		{
		case REG_DEAD:
		  if (REG_P (XEXP (link, 0)))
		    {
		      rtx reg = XEXP (link, 0);
		      int n;

		      /* A multi-word hard register counts once per
			 constituent hard regno.  */
		      if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
		        n = 1;
		      else
		        n = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
		      count += n;
		    }

		  /* Fall through.  */

		case REG_UNUSED:
		  if (kill)
		    {
		      /* Splice the note out and continue from its
			 successor without advancing PPREV.  */
		      rtx next = XEXP (link, 1);
		      free_EXPR_LIST_node (link);
		      *pprev = link = next;
		      break;
		    }
		  /* Fall through.  */

		default:
		  pprev = &XEXP (link, 1);
		  link = *pprev;
		  break;
		}
	    }
	}

      if (insn == BB_END (bb))
	break;
    }

  return count;
}
4445

4446 4447
/* Clear LOG_LINKS fields of insns in a selected blocks or whole chain
   if blocks is NULL.  */
4448

4449
static void
4450
clear_log_links (sbitmap blocks)
Alex Samuel committed
4451
{
4452 4453
  rtx insn;
  int i;
4454

4455
  if (!blocks)
4456
    {
4457 4458
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
4459
	  free_INSN_LIST_list (&LOG_LINKS (insn));
4460
    }
4461 4462 4463 4464
  else
    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
      {
	basic_block bb = BASIC_BLOCK (i);
4465

4466
	for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
4467 4468
	     insn = NEXT_INSN (insn))
	  if (INSN_P (insn))
4469
	    free_INSN_LIST_list (&LOG_LINKS (insn));
4470
      });
Alex Samuel committed
4471
}
4472 4473 4474 4475 4476 4477 4478

/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
   correspond to the hard registers, if any, set in that map.  This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble.  */

void
4479
reg_set_to_hard_reg_set (HARD_REG_SET *to, bitmap from)
4480
{
4481
  unsigned i;
4482
  bitmap_iterator bi;
4483

4484 4485 4486 4487 4488 4489
  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      if (i >= FIRST_PSEUDO_REGISTER)
	return;
      SET_HARD_REG_BIT (*to, i);
    }
4490
}