/* Convert RTL to assembler code and output it, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is the final pass of the compiler.
   It looks at the rtl code for a function and outputs assembler code.

   Call `final_start_function' to output the assembler code for function entry,
   `final' to output assembler code for some RTL code,
   `final_end_function' to output assembler code for function exit.
   If a function is compiled in several pieces, each piece is
   output separately with `final'.

   Some optimizations are also done at this level.
   Move instructions that were made unnecessary by good register allocation
   are detected and omitted from the output.  (Though most of these
   are removed by the last jump pass.)

   Instructions to set the condition codes are omitted when it can be
   seen that the condition codes already had the desired values.

   In some cases it is sufficient if the inherited condition codes
   have related values, but this may require the following insn
   (the one that tests the condition codes) to be modified.

   The code for the function prologue and epilogue is generated
   directly in assembler by the target functions function_prologue and
   function_epilogue.  Those instructions never exist as rtl.  */
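
/* An illustrative call sequence (a sketch only; the argument names are
   placeholders, but the functions and the parameter order follow the
   declarations in this file):

       final_start_function (first_insn, asm_out_file, optimize);
       final (first_insn, asm_out_file, optimize);
       final_end_function ();  */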

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "conditions.h"
#include "flags.h"
#include "real.h"
#include "hard-reg-set.h"
#include "output.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "reload.h"
#include "intl.h"
#include "basic-block.h"
#include "target.h"
#include "debug.h"
#include "expr.h"
#include "cfglayout.h"
#include "tree-pass.h"
#include "timevar.h"
#include "cgraph.h"
#include "coverage.h"
#include "df.h"
#include "vecprim.h"
#include "ggc.h"
#include "cfgloop.h"
#include "params.h"

#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h"		/* Needed for external data
				   declarations for e.g. AIX 4.x.  */
#endif

#if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
#include "dwarf2out.h"
#endif

#ifdef DBX_DEBUGGING_INFO
#include "dbxout.h"
#endif

#ifdef SDB_DEBUGGING_INFO
#include "sdbout.h"
#endif

/* If we aren't using cc0, CC_STATUS_INIT shouldn't exist.  So define a
   null default for it to save conditionalization later.  */
#ifndef CC_STATUS_INIT
#define CC_STATUS_INIT
#endif

/* How to start an assembler comment.  */
#ifndef ASM_COMMENT_START
#define ASM_COMMENT_START ";#"
#endif

/* Is the given character a logical line separator for the assembler?  */
#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
#define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
#endif

#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif

/* Bitflags used by final_scan_insn.  */
#define SEEN_BB		1
#define SEEN_NOTE	2
#define SEEN_EMITTED	4

/* Last insn processed by final_scan_insn.  */
static rtx debug_insn;
rtx current_output_insn;

/* Line number of last NOTE.  */
static int last_linenum;

/* Highest line number in current block.  */
static int high_block_linenum;

/* Likewise for function.  */
static int high_function_linenum;

/* Filename of last NOTE.  */
static const char *last_filename;

/* Override filename and line number.  */
static const char *override_filename;
static int override_linenum;

146 147
/* Whether to force emission of a line note before the next insn.  */
static bool force_source_line = false;

extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */

/* Nonzero while outputting an `asm' with operands.
   This means that inconsistencies are the user's fault, so don't die.
   The precise value is the insn being output, to pass to error_for_asm.  */
rtx this_is_asm_operands;

/* Number of operands of this insn, for an `asm' with operands.  */
static unsigned int insn_noperands;

/* Compare optimization flag.  */

static rtx last_ignored_compare = 0;

/* Assign a unique number to each insn that is output.
   This can be used to generate unique local labels.  */

static int insn_counter = 0;

#ifdef HAVE_cc0
/* This variable contains machine-dependent flags (defined in tm.h)
   set and examined by output routines
   that describe how to interpret the condition codes properly.  */

CC_STATUS cc_status;

/* During output of an insn, this contains a copy of cc_status
   from before the insn.  */

CC_STATUS cc_prev_status;
#endif

/* Nonzero means current function must be given a frame pointer.
   Initialized in function.c to 0.  Set only in reload1.c as per
   the needs of the function.  */

int frame_pointer_needed;

/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */

static int block_depth;

/* Nonzero if have enabled APP processing of our assembler output.  */

static int app_on;

/* If we are outputting an insn sequence, this contains the sequence rtx.
   Zero otherwise.  */

rtx final_sequence;

#ifdef ASSEMBLER_DIALECT

/* Number of the assembler dialect to use, starting at 0.  */
static int dialect_number;
#endif

#ifdef HAVE_conditional_execution
/* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
rtx current_insn_predicate;
#endif

#ifdef HAVE_ATTR_length
static int asm_insn_count (rtx);
#endif
static void profile_function (FILE *);
static void profile_after_prologue (FILE *);
static bool notice_source_line (rtx);
static rtx walk_alter_subreg (rtx *, bool *);
static void output_asm_name (void);
static void output_alternate_entry_point (FILE *, rtx);
static tree get_mem_expr_from_op (rtx, int *);
static void output_asm_operand_names (rtx *, int *, int);
static void output_operand (rtx, int);
#ifdef LEAF_REGISTERS
static void leaf_renumber_regs (rtx);
#endif
#ifdef HAVE_cc0
static int alter_cond (rtx);
#endif
#ifndef ADDR_VEC_ALIGN
static int final_addr_vec_align (rtx);
#endif
#ifdef HAVE_ATTR_length
static int align_fuzz (rtx, rtx, int, unsigned);
#endif

/* Initialize data in final at the beginning of a compilation.  */

void
init_final (const char *filename ATTRIBUTE_UNUSED)
{
  app_on = 0;
  final_sequence = 0;

#ifdef ASSEMBLER_DIALECT
  dialect_number = ASSEMBLER_DIALECT;
#endif
}

/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  */
void
default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
}

/* Default target hook that outputs nothing to a stream.  */
void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}

/* Enable APP processing of subsequent output.
   Used before the output from an `asm' statement.  */

void
app_enable (void)
{
  if (! app_on)
    {
      fputs (ASM_APP_ON, asm_out_file);
      app_on = 1;
    }
}

/* Disable APP processing of subsequent output.
   Called from varasm.c before most kinds of output.  */

void
app_disable (void)
{
  if (app_on)
    {
      fputs (ASM_APP_OFF, asm_out_file);
      app_on = 0;
    }
}

/* Return the number of slots filled in the current
   delayed branch sequence (we don't count the insn needing the
   delay slot).   Zero if not in a delayed branch sequence.  */

#ifdef DELAY_SLOTS
int
dbr_sequence_length (void)
{
  if (final_sequence != 0)
    return XVECLEN (final_sequence, 0) - 1;
  else
    return 0;
}
#endif

/* The next two pages contain routines used to compute the length of an insn
   and to shorten branches.  */

/* Arrays for insn lengths, and addresses.  The latter is referenced by
   `insn_current_length'.  */

static int *insn_lengths;

VEC(int,heap) *insn_addresses_;

/* Max uid for which the above arrays are valid.  */
static int insn_lengths_max_uid;

/* Address of insn being processed.  Used by `insn_current_length'.  */
int insn_current_address;

/* Address of insn being processed in previous iteration.  */
int insn_last_address;

/* Known invariant alignment of insn being processed.  */
int insn_current_align;

/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
   gives the next following alignment insn that increases the known
   alignment, or NULL_RTX if there is no such insn.
   For any alignment obtained this way, we can again index uid_align with
   its uid to obtain the next following align that in turn increases the
   alignment, till we reach NULL_RTX; the sequence obtained this way
   for each insn we'll call the alignment chain of this insn in the following
   comments.  */

struct label_alignment
{
  short alignment;
  short max_skip;
};

static rtx *uid_align;
static int *uid_shuid;
static struct label_alignment *label_align;

/* Indicate that branch shortening hasn't yet been done.  */

void
init_insn_lengths (void)
{
  if (uid_shuid)
    {
      free (uid_shuid);
      uid_shuid = 0;
    }
  if (insn_lengths)
    {
      free (insn_lengths);
      insn_lengths = 0;
      insn_lengths_max_uid = 0;
    }
#ifdef HAVE_ATTR_length
  INSN_ADDRESSES_FREE ();
#endif
  if (uid_align)
    {
      free (uid_align);
      uid_align = 0;
    }
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
   length.  */
static inline int
get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
		   int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
{
#ifdef HAVE_ATTR_length
  rtx body;
  int i;
  int length = 0;

  if (insn_lengths_max_uid > INSN_UID (insn))
    return insn_lengths[INSN_UID (insn)];
  else
    switch (GET_CODE (insn))
      {
      case NOTE:
      case BARRIER:
      case CODE_LABEL:
	return 0;

      case CALL_INSN:
	length = fallback_fn (insn);
	break;

      case JUMP_INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	  {
	    /* Alignment is machine-dependent and should be handled by
	       ADDR_VEC_ALIGN.  */
	  }
	else
	  length = fallback_fn (insn);
	break;

      case INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
	  return 0;

	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	  length = asm_insn_count (body) * fallback_fn (insn);
	else if (GET_CODE (body) == SEQUENCE)
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    length += get_attr_length (XVECEXP (body, 0, i));
	else
	  length = fallback_fn (insn);
	break;

      default:
	break;
      }

#ifdef ADJUST_INSN_LENGTH
  ADJUST_INSN_LENGTH (insn, length);
#endif
  return length;
#else /* not HAVE_ATTR_length */
  return 0;
#define insn_default_length 0
#define insn_min_length 0
#endif /* not HAVE_ATTR_length */
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its maximum length.  */
int
get_attr_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_default_length);
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its minimum length.  */
int
get_attr_min_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_min_length);
}

/* Code to handle alignment inside shorten_branches.  */

/* Here is an explanation how the algorithm in align_fuzz can give
   proper results:

   Call a sequence of instructions beginning with alignment point X
   and continuing until the next alignment point `block X'.  When `X'
   is used in an expression, it means the alignment value of the
   alignment point.

   Call the distance between the start of the first insn of block X, and
   the end of the last insn of block X `IX', for the `inner size of X'.
   This is clearly the sum of the instruction lengths.

   Likewise with the next alignment-delimited block following X, which we
   shall call block Y.

   Call the distance between the start of the first insn of block X, and
   the start of the first insn of block Y `OX', for the `outer size of X'.

   The estimated padding is then OX - IX.

   OX can be safely estimated as

           if (X >= Y)
                   OX = round_up(IX, Y)
           else
                   OX = round_up(IX, X) + Y - X

   Clearly est(IX) >= real(IX), because that only depends on the
   instruction lengths, and those being overestimated is a given.

   Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
   we needn't worry about that when thinking about OX.

   When X >= Y, the alignment provided by Y adds no uncertainty factor
   for branch ranges starting before X, so we can just round what we have.
   But when X < Y, we don't know anything about the, so to speak,
   `middle bits', so we have to assume the worst when aligning up from an
   address mod X to one mod Y, which is Y - X.  */
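
/* A worked example of the estimate above (illustrative numbers, not
   taken from any particular target): with IX = 10, X = 4 and Y = 8 we
   have X < Y, so OX = round_up (10, 4) + 8 - 4 = 16 and the estimated
   padding OX - IX is 6.  With X = 8 and Y = 4 we have X >= Y, so
   OX = round_up (10, 4) = 12 and the estimated padding is 2.  */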

#ifndef LABEL_ALIGN
#define LABEL_ALIGN(LABEL) align_labels_log
#endif

#ifndef LABEL_ALIGN_MAX_SKIP
#define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
#endif

#ifndef LOOP_ALIGN
#define LOOP_ALIGN(LABEL) align_loops_log
#endif

#ifndef LOOP_ALIGN_MAX_SKIP
#define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER
#define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
#endif

#ifndef JUMP_ALIGN
#define JUMP_ALIGN(LABEL) align_jumps_log
#endif

#ifndef JUMP_ALIGN_MAX_SKIP
#define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
#endif

#ifndef ADDR_VEC_ALIGN
static int
final_addr_vec_align (rtx addr_vec)
{
  int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));

  if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  return exact_log2 (align);

}
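
/* For instance (an illustrative figure; mode sizes are target-dependent),
   an ADDR_VEC whose elements are HImode -- two bytes each -- yields
   exact_log2 (2), i.e. a log2 alignment of 1, unless the
   BIGGEST_ALIGNMENT cap above applies.  */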

#define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
#endif

#ifndef INSN_LENGTH_ALIGNMENT
#define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
#endif

#define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])

static int min_labelno, max_labelno;

#define LABEL_TO_ALIGNMENT(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)

#define LABEL_TO_MAX_SKIP(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)

/* For the benefit of port specific code do this also as a function.  */

int
label_to_alignment (rtx label)
{
  return LABEL_TO_ALIGNMENT (label);
}

#ifdef HAVE_ATTR_length
/* The differences in addresses
   between a branch and its target might grow or shrink depending on
   the alignment the start insn of the range (the branch for a forward
   branch or the label for a backward branch) starts out on; if these
   differences are used naively, they can even oscillate infinitely.
   We therefore want to compute a 'worst case' address difference that
   is independent of the alignment the start insn of the range ends
   up on, and that is at least as large as the actual difference.
   The function align_fuzz calculates the amount we have to add to the
   naively computed difference, by traversing the part of the alignment
   chain of the start insn of the range that is in front of the end insn
   of the range, and considering for each alignment the maximum amount
   that it might contribute to a size increase.

   For casesi tables, we also want to know worst case minimum amounts of
   address difference, in case a machine description wants to introduce
   some common offset that is added to all offsets in a table.
   For this purpose, align_fuzz with a growth argument of 0 computes the
   appropriate adjustment.  */

/* Compute the maximum delta by which the difference of the addresses of
   START and END might grow / shrink due to a different address for start
   which changes the size of alignment insns between START and END.
   KNOWN_ALIGN_LOG is the alignment known for START.
   GROWTH should be ~0 if the objective is to compute potential code size
   increase, and 0 if the objective is to compute potential shrink.
   The return value is undefined for any other value of GROWTH.  */

static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
      known_align_log = LABEL_TO_ALIGNMENT (align_label);
      new_align = 1 << known_align_log;
      if (new_align < known_align)
	continue;
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}

/* Compute a worst-case reference address of a branch so that it
   can be safely used in the presence of aligned labels.  Since the
   size of the branch itself is unknown, the size of the branch is
   not included in the range.  I.e. for a forward branch, the reference
   address is the end address of the branch as known from the previous
   branch shortening pass, minus a value to account for possible size
   increase due to alignment.  For a backward branch, it is the start
   address of the branch as known from the current pass, plus a value
   to account for possible size increase due to alignment.
   NB.: Therefore, the maximum offset allowed for backward branches needs
   to exclude the branch size.  */

int
insn_current_reference_address (rtx branch)
{
  rtx dest, seq;
  int seq_uid;

  if (! INSN_ADDRESSES_SET_P ())
    return 0;

  seq = NEXT_INSN (PREV_INSN (branch));
  seq_uid = INSN_UID (seq);
  if (!JUMP_P (branch))
    /* This can happen for example on the PA; the objective is to know the
       offset to address something in front of the start of the function.
       Thus, we can treat it like a backward branch.
       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
       any alignment we'd encounter, so we skip the call to align_fuzz.  */
    return insn_current_address;
  dest = JUMP_LABEL (branch);

  /* BRANCH has no proper alignment chain set, so use SEQ.
     BRANCH also has no INSN_SHUID.  */
  if (INSN_SHUID (seq) < INSN_SHUID (dest))
    {
      /* Forward branch.  */
      return (insn_last_address + insn_lengths[seq_uid]
	      - align_fuzz (seq, dest, length_unit_log, ~0));
    }
  else
    {
      /* Backward branch.  */
      return (insn_current_address
	      + align_fuzz (dest, seq, length_unit_log, ~0));
    }
}
#endif /* HAVE_ATTR_length */

/* Compute branch alignments based on frequency information in the
   CFG.  */

static unsigned int
compute_alignments (void)
{
  int log, max_skip, max_log;
  basic_block bb;
  int freq_max = 0;
  int freq_threshold = 0;

  if (label_align)
    {
      free (label_align);
      label_align = 0;
    }

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);

  /* If not optimizing or optimizing for size, don't assign any alignments.  */
  if (! optimize || optimize_size)
    return 0;

  if (dump_file)
    {
      dump_flow_info (dump_file, TDF_DETAILS);
      flow_loops_dump (dump_file, NULL, 1);
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
    }
  FOR_EACH_BB (bb)
    if (bb->frequency > freq_max)
      freq_max = bb->frequency;
  freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);

  if (dump_file)
    fprintf(dump_file, "freq_max: %i\n",freq_max);
  FOR_EACH_BB (bb)
    {
      rtx label = BB_HEAD (bb);
      int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
      edge e;
      edge_iterator ei;

      if (!LABEL_P (label)
	  || probably_never_executed_bb_p (bb))
	{
	  if (dump_file)
	    fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
		    bb->index, bb->frequency, bb->loop_father->num, bb->loop_depth);
	  continue;
	}
      max_log = LABEL_ALIGN (label);
      max_skip = LABEL_ALIGN_MAX_SKIP;

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
	  else
	    branch_frequency += EDGE_FREQUENCY (e);
	}
      if (dump_file)
	{
	  fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
		  bb->index, bb->frequency, bb->loop_father->num,
		  bb->loop_depth,
		  fallthru_frequency, branch_frequency);
	  if (!bb->loop_father->inner && bb->loop_father->num)
	    fprintf (dump_file, " inner_loop");
	  if (bb->loop_father->header == bb)
	    fprintf (dump_file, " loop_header");
	  fprintf (dump_file, "\n");
	}

      /* There are two reasons to align a block that has no incoming
	 fallthru edge:
	 1) to avoid fetch stalls when the branch destination is near a
	    cache boundary
	 2) to improve cache efficiency in case the previous block is not
	    executed (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more
	 frequently than the predecessor, when the predecessor is likely
	 not to be executed at all when the function is called.  */

      if (!has_fallthru
	  && (branch_frequency > freq_threshold
	      || (bb->frequency > bb->prev_bb->frequency * 10
		  && (bb->prev_bb->frequency
		      <= ENTRY_BLOCK_PTR->frequency / 2))))
	{
	  log = JUMP_ALIGN (label);
	  if (dump_file)
	    fprintf(dump_file, "  jump alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = JUMP_ALIGN_MAX_SKIP;
	    }
	}
      /* In case block is frequent and reached mostly by non-fallthru edge,
	 align it.  It is most likely a first block of loop.  */
      if (has_fallthru
	  && maybe_hot_bb_p (bb)
	  && branch_frequency + fallthru_frequency > freq_threshold
	  && (branch_frequency
	      > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
	{
	  log = LOOP_ALIGN (label);
	  if (dump_file)
	    fprintf(dump_file, "  internal loop alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LOOP_ALIGN_MAX_SKIP;
	    }
	}
      LABEL_TO_ALIGNMENT (label) = max_log;
      LABEL_TO_MAX_SKIP (label) = max_skip;
    }

  if (dump_file)
    loop_optimizer_finalize ();
  return 0;
}

struct rtl_opt_pass pass_compute_alignments =
{
 {
  RTL_PASS,
  "alignments",                         /* name */
  NULL,                                 /* gate */
  compute_alignments,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing
  | TODO_ggc_collect                    /* todo_flags_finish */
 }
};


/* Make a pass over all insns and compute their actual lengths by shortening
   any branches of variable length if possible.  */

/* shorten_branches might be called multiple times:  for example, the SH
   port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
   In order to do this, it needs proper length information, which it obtains
   by calling shorten_branches.  This cannot be collapsed with
   shorten_branches itself into a single pass unless we also want to integrate
   reorg.c, since the branch splitting exposes new instructions with delay
   slots.  */

void
shorten_branches (rtx first ATTRIBUTE_UNUSED)
{
  rtx insn;
  int max_uid;
  int i;
  int max_log;
  int max_skip;
#ifdef HAVE_ATTR_length
#define MAX_CODE_ALIGN 16
  rtx seq;
  int something_changed = 1;
  char *varying_length;
  rtx body;
  int uid;
  rtx align_tab[MAX_CODE_ALIGN];

#endif

  /* Compute maximum UID and allocate label_align / uid_shuid.  */
  max_uid = get_max_uid ();

  /* Free uid_shuid before reallocating it.  */
  free (uid_shuid);

  uid_shuid = XNEWVEC (int, max_uid);

  if (max_labelno != max_label_num ())
    {
      int old = max_labelno;
      int n_labels;
      int n_old_labels;

      max_labelno = max_label_num ();

      n_labels = max_labelno - min_labelno + 1;
      n_old_labels = old - min_labelno + 1;

      label_align = xrealloc (label_align,
			      n_labels * sizeof (struct label_alignment));

      /* Range of labels grows monotonically in the function.  Failing here
         means that the initialization of the array got lost.  */
      gcc_assert (n_old_labels <= n_labels);

      memset (label_align + n_old_labels, 0,
	      (n_labels - n_old_labels) * sizeof (struct label_alignment));
    }

  /* Initialize label_align and set up uid_shuid to be strictly
     monotonically rising with insn order.  */
  /* We use max_log here to keep track of the maximum alignment we want to
     impose on the next CODE_LABEL (or the current one if we are processing
     the CODE_LABEL itself).  */

  max_log = 0;
  max_skip = 0;

  for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
    {
      int log;

      INSN_SHUID (insn) = i++;
      if (INSN_P (insn))
	continue;

      if (LABEL_P (insn))
	{
	  rtx next;

	  /* Merge in alignments computed by compute_alignments.  */
	  log = LABEL_TO_ALIGNMENT (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_TO_MAX_SKIP (insn);
	    }

	  log = LABEL_ALIGN (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_ALIGN_MAX_SKIP;
	    }
	  next = next_nonnote_insn (insn);
	  /* ADDR_VECs only take room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION
	      || readonly_data_section == text_section)
	    if (next && JUMP_P (next))
	      {
		rtx nextbody = PATTERN (next);
		if (GET_CODE (nextbody) == ADDR_VEC
		    || GET_CODE (nextbody) == ADDR_DIFF_VEC)
		  {
		    log = ADDR_VEC_ALIGN (next);
		    if (max_log < log)
		      {
			max_log = log;
			max_skip = LABEL_ALIGN_MAX_SKIP;
		      }
		  }
	      }
	  LABEL_TO_ALIGNMENT (insn) = max_log;
	  LABEL_TO_MAX_SKIP (insn) = max_skip;
	  max_log = 0;
	  max_skip = 0;
	}
      else if (BARRIER_P (insn))
	{
	  rtx label;

	  for (label = insn; label && ! INSN_P (label);
	       label = NEXT_INSN (label))
	    if (LABEL_P (label))
	      {
		log = LABEL_ALIGN_AFTER_BARRIER (insn);
		if (max_log < log)
		  {
		    max_log = log;
		    max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
		  }
		break;
	      }
	}
    }
#ifdef HAVE_ATTR_length

  /* Allocate the rest of the arrays.  */
  insn_lengths = XNEWVEC (int, max_uid);
  insn_lengths_max_uid = max_uid;
  /* Syntax errors can lead to labels being outside of the main insn stream.
     Initialize insn_addresses, so that we get reproducible results.  */
  INSN_ADDRESSES_ALLOC (max_uid);

  varying_length = XCNEWVEC (char, max_uid);

  /* Initialize uid_align.  We scan instructions
     from end to start, and keep in align_tab[n] the last seen insn
     that does an alignment of at least n+1, i.e. the successor
     in the alignment chain for an insn that does / has a known
     alignment of n.  */
  uid_align = XCNEWVEC (rtx, max_uid);

  for (i = MAX_CODE_ALIGN; --i >= 0;)
    align_tab[i] = NULL_RTX;
  seq = get_last_insn ();
  for (; seq; seq = PREV_INSN (seq))
    {
      int uid = INSN_UID (seq);
      int log;
      log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
      uid_align[uid] = align_tab[0];
      if (log)
	{
	  /* Found an alignment label.  */
	  uid_align[uid] = align_tab[log];
	  for (i = log - 1; i >= 0; i--)
	    align_tab[i] = seq;
	}
    }
#ifdef CASE_VECTOR_SHORTEN_MODE
  if (optimize)
    {
      /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
         label fields.  */

      int min_shuid = INSN_SHUID (get_insns ()) - 1;
      int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
      int rel;

      for (insn = first; insn != 0; insn = NEXT_INSN (insn))
	{
	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
	  int len, i, min, max, insn_shuid;
	  int min_align;
	  addr_diff_vec_flags flags;

	  if (!JUMP_P (insn)
	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
	    continue;
	  pat = PATTERN (insn);
	  len = XVECLEN (pat, 1);
	  gcc_assert (len > 0);
	  min_align = MAX_CODE_ALIGN;
	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
	    {
	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
	      int shuid = INSN_SHUID (lab);
	      if (shuid < min)
		{
		  min = shuid;
		  min_lab = lab;
		}
	      if (shuid > max)
		{
		  max = shuid;
		  max_lab = lab;
		}
	      if (min_align > LABEL_TO_ALIGNMENT (lab))
		min_align = LABEL_TO_ALIGNMENT (lab);
	    }
	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
	  insn_shuid = INSN_SHUID (insn);
	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
	  memset (&flags, 0, sizeof (flags));
	  flags.min_align = min_align;
	  flags.base_after_vec = rel > insn_shuid;
	  flags.min_after_vec  = min > insn_shuid;
	  flags.max_after_vec  = max > insn_shuid;
	  flags.min_after_base = min > rel;
	  flags.max_after_base = max > rel;
	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
	}
    }
#endif /* CASE_VECTOR_SHORTEN_MODE */

  /* Compute initial lengths, addresses, and varying flags for each insn.  */
  for (insn_current_address = 0, insn = first;
       insn != 0;
       insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
    {
      uid = INSN_UID (insn);

      insn_lengths[uid] = 0;

      if (LABEL_P (insn))
	{
	  int log = LABEL_TO_ALIGNMENT (insn);
	  if (log)
	    {
	      int align = 1 << log;
	      int new_address = (insn_current_address + align - 1) & -align;
	      insn_lengths[uid] = new_address - insn_current_address;
	    }
	}

      INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];

      if (NOTE_P (insn) || BARRIER_P (insn)
	  || LABEL_P (insn))
	continue;
      if (INSN_DELETED_P (insn))
	continue;

      body = PATTERN (insn);
      if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	{
	  /* This only takes room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION
	      || readonly_data_section == text_section)
	    insn_lengths[uid] = (XVECLEN (body,
					  GET_CODE (body) == ADDR_DIFF_VEC)
				 * GET_MODE_SIZE (GET_MODE (body)));
	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
	}
      else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
      else if (GET_CODE (body) == SEQUENCE)
	{
	  int i;
	  int const_delay_slots;
#ifdef DELAY_SLOTS
	  const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
#else
	  const_delay_slots = 0;
#endif
	  /* Inside a delay slot sequence, we do not do any branch shortening
	     if the shortening could change the number of delay slots
	     of the branch.  */
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    {
	      rtx inner_insn = XVECEXP (body, 0, i);
	      int inner_uid = INSN_UID (inner_insn);
	      int inner_length;

	      if (GET_CODE (body) == ASM_INPUT
		  || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
		inner_length = (asm_insn_count (PATTERN (inner_insn))
				* insn_default_length (inner_insn));
	      else
		inner_length = insn_default_length (inner_insn);

	      insn_lengths[inner_uid] = inner_length;
	      if (const_delay_slots)
		{
		  if ((varying_length[inner_uid]
		       = insn_variable_length_p (inner_insn)) != 0)
		    varying_length[uid] = 1;
		  INSN_ADDRESSES (inner_uid) = (insn_current_address
						+ insn_lengths[uid]);
		}
	      else
		varying_length[inner_uid] = 0;
	      insn_lengths[uid] += inner_length;
	    }
	}
      else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
	{
	  insn_lengths[uid] = insn_default_length (insn);
	  varying_length[uid] = insn_variable_length_p (insn);
	}

      /* If needed, do any adjustment.  */
#ifdef ADJUST_INSN_LENGTH
      ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
      if (insn_lengths[uid] < 0)
	fatal_insn ("negative insn length", insn);
#endif
    }

  /* Now loop over all the insns finding varying length insns.  For each,
     get the current insn length.  If it has changed, reflect the change.
     When nothing changes for a full pass, we are done.  */

  while (something_changed)
    {
      something_changed = 0;
      insn_current_align = MAX_CODE_ALIGN - 1;
      for (insn_current_address = 0, insn = first;
	   insn != 0;
	   insn = NEXT_INSN (insn))
	{
	  int new_length;
#ifdef ADJUST_INSN_LENGTH
	  int tmp_length;
#endif
	  int length_align;

	  uid = INSN_UID (insn);

	  if (LABEL_P (insn))
	    {
	      int log = LABEL_TO_ALIGNMENT (insn);
	      if (log > insn_current_align)
		{
		  int align = 1 << log;
		  int new_address= (insn_current_address + align - 1) & -align;
		  insn_lengths[uid] = new_address - insn_current_address;
		  insn_current_align = log;
		  insn_current_address = new_address;
		}
	      else
		insn_lengths[uid] = 0;
	      INSN_ADDRESSES (uid) = insn_current_address;
	      continue;
	    }

	  length_align = INSN_LENGTH_ALIGNMENT (insn);
	  if (length_align < insn_current_align)
	    insn_current_align = length_align;

	  insn_last_address = INSN_ADDRESSES (uid);
	  INSN_ADDRESSES (uid) = insn_current_address;

#ifdef CASE_VECTOR_SHORTEN_MODE
	  if (optimize && JUMP_P (insn)
	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      rtx body = PATTERN (insn);
	      int old_length = insn_lengths[uid];
	      rtx rel_lab = XEXP (XEXP (body, 0), 0);
	      rtx min_lab = XEXP (XEXP (body, 2), 0);
	      rtx max_lab = XEXP (XEXP (body, 3), 0);
	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
	      rtx prev;
	      int rel_align = 0;
	      addr_diff_vec_flags flags;

	      /* Avoid automatic aggregate initialization.  */
	      flags = ADDR_DIFF_VEC_FLAGS (body);

	      /* Try to find a known alignment for rel_lab.  */
	      for (prev = rel_lab;
		   prev
		   && ! insn_lengths[INSN_UID (prev)]
		   && ! (varying_length[INSN_UID (prev)] & 1);
		   prev = PREV_INSN (prev))
		if (varying_length[INSN_UID (prev)] & 2)
		  {
		    rel_align = LABEL_TO_ALIGNMENT (prev);
		    break;
		  }

	      /* See the comment on addr_diff_vec_flags in rtl.h for the
		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
	      /* Anything after INSN has still addresses from the last
		 pass; adjust these so that they reflect our current
		 estimate for this pass.  */
	      if (flags.base_after_vec)
		rel_addr += insn_current_address - insn_last_address;
	      if (flags.min_after_vec)
		min_addr += insn_current_address - insn_last_address;
	      if (flags.max_after_vec)
		max_addr += insn_current_address - insn_last_address;
	      /* We want to know the worst case, i.e. lowest possible value
		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
		 its offset is positive, and we have to be wary of code shrink;
		 otherwise, it is negative, and we have to be wary of code
		 size increase.  */
	      if (flags.min_after_base)
		{
		  /* If INSN is between REL_LAB and MIN_LAB, the size
		     changes we are about to make can change the alignment
		     within the observed offset, therefore we have to break
		     it up into two parts that are independent.  */
		  if (! flags.base_after_vec && flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
		    }
		  else
		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
		}
	      else
		{
		  if (flags.base_after_vec && ! flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
		    }
		  else
		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
		}
	      /* Likewise, determine the highest possible value
		 for the offset of MAX_LAB.  */
	      if (flags.max_after_base)
		{
		  if (! flags.base_after_vec && flags.max_after_vec)
		    {
		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
		    }
		  else
		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
		}
	      else
		{
		  if (flags.base_after_vec && ! flags.max_after_vec)
		    {
		      max_addr += align_fuzz (max_lab, insn, 0, 0);
		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
		    }
		  else
		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
		}
	      PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
							max_addr - rel_addr,
							body));
	      if (JUMP_TABLES_IN_TEXT_SECTION
		  || readonly_data_section == text_section)
		{
		  insn_lengths[uid]
		    = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
		  insn_current_address += insn_lengths[uid];
		  if (insn_lengths[uid] != old_length)
		    something_changed = 1;
		}

	      continue;
	    }
#endif /* CASE_VECTOR_SHORTEN_MODE */

	  if (! (varying_length[uid]))
	    {
	      if (NONJUMP_INSN_P (insn)
		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
		{
		  int i;

		  body = PATTERN (insn);
		  for (i = 0; i < XVECLEN (body, 0); i++)
		    {
		      rtx inner_insn = XVECEXP (body, 0, i);
		      int inner_uid = INSN_UID (inner_insn);

		      INSN_ADDRESSES (inner_uid) = insn_current_address;

		      insn_current_address += insn_lengths[inner_uid];
		    }
		}
	      else
		insn_current_address += insn_lengths[uid];

	      continue;
	    }

	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      int i;

	      body = PATTERN (insn);
	      new_length = 0;
	      for (i = 0; i < XVECLEN (body, 0); i++)
		{
		  rtx inner_insn = XVECEXP (body, 0, i);
		  int inner_uid = INSN_UID (inner_insn);
		  int inner_length;

		  INSN_ADDRESSES (inner_uid) = insn_current_address;

		  /* insn_current_length returns 0 for insns with a
		     non-varying length.  */
		  if (! varying_length[inner_uid])
		    inner_length = insn_lengths[inner_uid];
		  else
		    inner_length = insn_current_length (inner_insn);

		  if (inner_length != insn_lengths[inner_uid])
		    {
		      insn_lengths[inner_uid] = inner_length;
		      something_changed = 1;
		    }
		  insn_current_address += insn_lengths[inner_uid];
		  new_length += inner_length;
		}
	    }
	  else
	    {
	      new_length = insn_current_length (insn);
	      insn_current_address += new_length;
	    }

#ifdef ADJUST_INSN_LENGTH
	  /* If needed, do any adjustment.  */
	  tmp_length = new_length;
	  ADJUST_INSN_LENGTH (insn, new_length);
	  insn_current_address += (new_length - tmp_length);
#endif

	  if (new_length != insn_lengths[uid])
	    {
	      insn_lengths[uid] = new_length;
	      something_changed = 1;
	    }
	}
      /* For a non-optimizing compile, do only a single pass.  */
      if (!optimize)
	break;
    }

  free (varying_length);

#endif /* HAVE_ATTR_length */
}

#ifdef HAVE_ATTR_length
/* Given the body of an INSN known to be generated by an ASM statement, return
   the number of machine instructions likely to be generated for this insn.
   This is used to compute its length.  */

static int
asm_insn_count (rtx body)
{
  const char *template;
  int count = 1;

  if (GET_CODE (body) == ASM_INPUT)
    template = XSTR (body, 0);
  else
    template = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);

  for (; *template; template++)
    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template, template)
	|| *template == '\n')
      count++;

  return count;
}
#endif
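
/* As an illustration (an invented template, not from this file): with the
   default IS_ASM_LOGICAL_LINE_SEPARATOR, an asm body whose template is
   "mov r0,r1; add r2,r3\n\tnop" contains one ';' and one '\n', so
   asm_insn_count returns 3.  */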

/* ??? This is probably the wrong place for these.  */
/* Structure recording the mapping from source file and directory
   names at compile time to those to be embedded in debug
   information.  */
typedef struct debug_prefix_map
{
  const char *old_prefix;
  const char *new_prefix;
  size_t old_len;
  size_t new_len;
  struct debug_prefix_map *next;
} debug_prefix_map;

/* Linked list of such structures.  */
debug_prefix_map *debug_prefix_maps;


/* Record a debug file prefix mapping.  ARG is the argument to
   -fdebug-prefix-map and must be of the form OLD=NEW.  */

void
add_debug_prefix_map (const char *arg)
{
  debug_prefix_map *map;
  const char *p;

  p = strchr (arg, '=');
  if (!p)
    {
      error ("invalid argument %qs to -fdebug-prefix-map", arg);
      return;
    }
  map = XNEW (debug_prefix_map);
  map->old_prefix = ggc_alloc_string (arg, p - arg);
  map->old_len = p - arg;
  p++;
  map->new_prefix = ggc_strdup (p);
  map->new_len = strlen (p);
  map->next = debug_prefix_maps;
  debug_prefix_maps = map;
}

/* Perform user-specified mapping of debug filename prefixes.  Return
   the new name corresponding to FILENAME.  */

const char *
remap_debug_filename (const char *filename)
{
  debug_prefix_map *map;
  char *s;
  const char *name;
  size_t name_len;

  for (map = debug_prefix_maps; map; map = map->next)
    if (strncmp (filename, map->old_prefix, map->old_len) == 0)
      break;
  if (!map)
    return filename;
  name = filename + map->old_len;
  name_len = strlen (name) + 1;
  s = (char *) alloca (name_len + map->new_len);
  memcpy (s, map->new_prefix, map->new_len);
  memcpy (s + map->new_len, name, name_len);
  return ggc_strdup (s);
}
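
/* For example (a hypothetical mapping, not taken from this file): after
   -fdebug-prefix-map=/home/user/project=/usr/src has been recorded by
   add_debug_prefix_map, remap_debug_filename maps
   "/home/user/project/lib/foo.c" to "/usr/src/lib/foo.c".  */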

/* Output assembler code for the start of a function,
   and initialize some of the variables in this file
   for the new function.  The label for the function and associated
   assembler pseudo-ops have already been output in `assemble_start_function'.

   FIRST is the first insn of the rtl for the function being compiled.
   FILE is the file to write assembler code to.
   OPTIMIZE is nonzero if we should eliminate redundant
     test and compare insns.  */

void
final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
		      int optimize ATTRIBUTE_UNUSED)
{
  block_depth = 0;

  this_is_asm_operands = 0;

  last_filename = locator_file (prologue_locator);
  last_linenum = locator_line (prologue_locator);

  high_block_linenum = high_function_linenum = last_linenum;

  (*debug_hooks->begin_prologue) (last_linenum, last_filename);

#if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
    dwarf2out_begin_prologue (0, NULL);
#endif

#ifdef LEAF_REG_REMAP
  if (current_function_uses_only_leaf_regs)
    leaf_renumber_regs (first);
#endif

  /* The Sun386i and perhaps other machines don't work right
     if the profiling code comes after the prologue.  */
#ifdef PROFILE_BEFORE_PROLOGUE
  if (crtl->profile)
    profile_function (file);
#endif /* PROFILE_BEFORE_PROLOGUE */

#if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
  if (dwarf2out_do_frame ())
    dwarf2out_frame_debug (NULL_RTX, false);
#endif

  /* If debugging, assign block numbers to all of the blocks in this
     function.  */
  if (write_symbols)
    {
      reemit_insn_block_notes ();
      number_blocks (current_function_decl);
      /* We never actually put out begin/end notes for the top-level
	 block in the function.  But, conceptually, that block is
	 always needed.  */
      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
    }

  if (warn_frame_larger_than
      && get_frame_size () > frame_larger_than_size)
    {
      /* Issue a warning.  */
      warning (OPT_Wframe_larger_than_,
	       "the frame size of %wd bytes is larger than %wd bytes",
	       get_frame_size (), frame_larger_than_size);
    }

  /* First output the function prologue: code to set up the stack frame.  */
  targetm.asm_out.function_prologue (file, get_frame_size ());

  /* If the machine represents the prologue as RTL, the profiling code must
     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
#ifdef HAVE_prologue
  if (! HAVE_prologue)
#endif
    profile_after_prologue (file);
}

static void
profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef PROFILE_BEFORE_PROLOGUE
  if (crtl->profile)
    profile_function (file);
#endif /* not PROFILE_BEFORE_PROLOGUE */
}

static void
profile_function (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS	0
#endif
#if defined(ASM_OUTPUT_REG_PUSH)
  int sval = cfun->returns_struct;
  rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
#if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
  int cxt = cfun->static_chain_decl != NULL;
#endif
#endif /* ASM_OUTPUT_REG_PUSH */

  if (! NO_PROFILE_COUNTERS)
    {
      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
      switch_to_section (data_section);
      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
    }

  switch_to_section (current_function_section ());

#if defined(ASM_OUTPUT_REG_PUSH)
  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
    {
      ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
    }
#endif

#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
#else
#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    {
      ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
    }
#endif
#endif

  FUNCTION_PROFILER (file, current_function_funcdef_no);

#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
#else
#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    {
      ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
    }
#endif
#endif

#if defined(ASM_OUTPUT_REG_PUSH)
  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
    {
      ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
    }
#endif
}

/* Output assembler code for the end of a function.
   For clarity, args are same as those of `final_start_function'
   even though not all of them are needed.  */

void
final_end_function (void)
{
  app_disable ();

  (*debug_hooks->end_function) (high_function_linenum);

  /* Finally, output the function epilogue:
     code to restore the stack frame and return to the caller.  */
  targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());

  /* And debug output.  */
  (*debug_hooks->end_epilogue) (last_linenum, last_filename);

#if defined (DWARF2_UNWIND_INFO)
  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
      && dwarf2out_do_frame ())
    dwarf2out_end_epilogue (last_linenum, last_filename);
#endif
}

/* Output assembler code for some insns: all or part of a function.
   For description of args, see `final_start_function', above.  */

void
final (rtx first, FILE *file, int optimize)
{
  rtx insn;
  int max_uid = 0;
  int seen = 0;

  last_ignored_compare = 0;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)       /* Find largest UID.  */
	max_uid = INSN_UID (insn);
#ifdef HAVE_cc0
      /* If CC tracking across branches is enabled, record the insn which
	 jumps to each branch only reached from one place.  */
      if (optimize && JUMP_P (insn))
	{
	  rtx lab = JUMP_LABEL (insn);
	  if (lab && LABEL_NUSES (lab) == 1)
	    {
	      LABEL_REFS (lab) = insn;
	    }
	}
#endif
1677 1678
    }

1679 1680 1681 1682 1683
  init_recog ();

  CC_STATUS_INIT;

  /* Output the insns.  */
1684
  for (insn = first; insn;)
1685 1686
    {
#ifdef HAVE_ATTR_length
1687
      if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1688 1689 1690
	{
	  /* This can be triggered by bugs elsewhere in the compiler if
	     new insns are created after init_insn_lengths is called.  */
1691 1692
	  gcc_assert (NOTE_P (insn));
	  insn_current_address = -1;
1693 1694
	}
      else
1695
	insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1696 1697
#endif /* HAVE_ATTR_length */

1698
      insn = final_scan_insn (insn, file, optimize, 0, &seen);
1699
    }
1700 1701
}

1702
const char *
1703
get_insn_template (int code, rtx insn)
1704 1705 1706 1707
{
  switch (insn_data[code].output_format)
    {
    case INSN_OUTPUT_FORMAT_SINGLE:
1708
      return insn_data[code].output.single;
1709
    case INSN_OUTPUT_FORMAT_MULTI:
1710
      return insn_data[code].output.multi[which_alternative];
1711
    case INSN_OUTPUT_FORMAT_FUNCTION:
1712
      gcc_assert (insn);
1713
      return (*insn_data[code].output.function) (recog_data.operand, insn);
1714 1715

    default:
1716
      gcc_unreachable ();
1717 1718
    }
}
Kazu Hirata committed
1719

1720 1721 1722 1723 1724 1725
/* Emit the appropriate declaration for an alternate-entry-point
   symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
   LABEL_KIND != LABEL_NORMAL.

   The case fall-through in this function is intentional.  */
static void
1726
output_alternate_entry_point (FILE *file, rtx insn)
1727 1728 1729 1730 1731 1732 1733 1734 1735 1736
{
  const char *name = LABEL_NAME (insn);

  switch (LABEL_KIND (insn))
    {
    case LABEL_WEAK_ENTRY:
#ifdef ASM_WEAKEN_LABEL
      ASM_WEAKEN_LABEL (file, name);
#endif
    case LABEL_GLOBAL_ENTRY:
1737
      targetm.asm_out.globalize_label (file, name);
1738
    case LABEL_STATIC_ENTRY:
1739 1740 1741
#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
#endif
1742 1743 1744 1745 1746
      ASM_OUTPUT_LABEL (file, name);
      break;

    case LABEL_NORMAL:
    default:
1747
      gcc_unreachable ();
1748 1749 1750
    }
}

1751 1752 1753 1754 1755
/* The final scan for one insn, INSN.
   Args are same as in `final', except that INSN
   is the insn being scanned.
   Value returned is the next insn to be scanned.

1756 1757
   NOPEEPHOLES is the flag to disallow peephole processing (currently
   used for within delayed branch sequence output).
1758

1759 1760 1761 1762 1763 1764
   SEEN is used to track the end of the prologue, for emitting
   debug information.  We force the emission of a line note after
   both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
   at the beginning of the second basic block, whichever comes
   first.  */

1765
rtx
1766
final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1767
		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
1768
{
1769 1770 1771
#ifdef HAVE_cc0
  rtx set;
#endif
1772
  rtx next;
1773

1774 1775 1776 1777 1778 1779 1780 1781 1782 1783
  insn_counter++;

  /* Ignore deleted insns.  These can occur when we split insns (due to a
     template of "#") while not optimizing.  */
  if (INSN_DELETED_P (insn))
    return NEXT_INSN (insn);

  switch (GET_CODE (insn))
    {
    case NOTE:
1784
      switch (NOTE_KIND (insn))
1785 1786 1787
	{
	case NOTE_INSN_DELETED:
	  break;
1788

1789
	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1790
	  in_cold_section_p = !in_cold_section_p;
1791 1792 1793 1794 1795 1796 1797
#ifdef DWARF2_UNWIND_INFO
	  if (dwarf2out_do_frame ())
	    dwarf2out_switch_text_section ();
	  else
#endif
	    (*debug_hooks->switch_text_section) ();

1798
	  switch_to_section (current_function_section ());
1799
	  break;
1800

1801
	case NOTE_INSN_BASIC_BLOCK:
1802 1803
#ifdef TARGET_UNWIND_INFO
	  targetm.asm_out.unwind_emit (asm_out_file, insn);
1804
#endif
1805

1806 1807
	  if (flag_debug_asm)
	    fprintf (asm_out_file, "\t%s basic block %d\n",
1808
		     ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1809 1810 1811 1812

	  if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
	    {
	      *seen |= SEEN_EMITTED;
1813
	      force_source_line = true;
1814 1815 1816 1817
	    }
	  else
	    *seen |= SEEN_BB;

1818
	  break;
1819

1820
	case NOTE_INSN_EH_REGION_BEG:
1821 1822
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
				  NOTE_EH_HANDLER (insn));
Mike Stump committed
1823 1824
	  break;

1825
	case NOTE_INSN_EH_REGION_END:
1826 1827
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
				  NOTE_EH_HANDLER (insn));
Mike Stump committed
1828 1829
	  break;

1830
	case NOTE_INSN_PROLOGUE_END:
1831
	  targetm.asm_out.function_end_prologue (file);
1832
	  profile_after_prologue (file);
1833 1834 1835 1836

	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
1837
	      force_source_line = true;
1838 1839 1840 1841
	    }
	  else
	    *seen |= SEEN_NOTE;

1842 1843
	  break;

1844
	case NOTE_INSN_EPILOGUE_BEG:
1845
	  targetm.asm_out.function_begin_epilogue (file);
1846
	  break;
1847

1848
	case NOTE_INSN_FUNCTION_BEG:
1849
	  app_disable ();
1850
	  (*debug_hooks->end_prologue) (last_linenum, last_filename);
1851 1852 1853 1854

	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
1855
	      force_source_line = true;
1856 1857 1858 1859
	    }
	  else
	    *seen |= SEEN_NOTE;

1860
	  break;
1861 1862 1863

	case NOTE_INSN_BLOCK_BEG:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
1864
	      || debug_info_level == DINFO_LEVEL_VERBOSE
1865 1866 1867
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
1868 1869
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1870

1871 1872 1873
	      app_disable ();
	      ++block_depth;
	      high_block_linenum = last_linenum;
1874

1875
	      /* Output debugging info about the symbol-block beginning.  */
1876
	      (*debug_hooks->begin_block) (last_linenum, n);
1877

1878 1879 1880
	      /* Mark this block as output.  */
	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
	    }
1881 1882 1883 1884 1885 1886 1887 1888 1889 1890 1891 1892
	  if (write_symbols == DBX_DEBUG
	      || write_symbols == SDB_DEBUG)
	    {
	      location_t *locus_ptr
		= block_nonartificial_location (NOTE_BLOCK (insn));

	      if (locus_ptr != NULL)
		{
		  override_filename = LOCATION_FILE (*locus_ptr);
		  override_linenum = LOCATION_LINE (*locus_ptr);
		}
	    }
1893
	  break;
1894

1895 1896 1897
	case NOTE_INSN_BLOCK_END:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
	      || debug_info_level == DINFO_LEVEL_VERBOSE
1898 1899 1900
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
1901 1902
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1903

1904 1905 1906 1907
	      app_disable ();

	      /* End of a symbol-block.  */
	      --block_depth;
1908
	      gcc_assert (block_depth >= 0);
1909

1910
	      (*debug_hooks->end_block) (high_block_linenum, n);
1911
	    }
1912 1913 1914 1915 1916 1917 1918 1919 1920 1921 1922 1923 1924 1925 1926 1927 1928 1929
	  if (write_symbols == DBX_DEBUG
	      || write_symbols == SDB_DEBUG)
	    {
	      tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
	      location_t *locus_ptr
		= block_nonartificial_location (outer_block);

	      if (locus_ptr != NULL)
		{
		  override_filename = LOCATION_FILE (*locus_ptr);
		  override_linenum = LOCATION_LINE (*locus_ptr);
		}
	      else
		{
		  override_filename = NULL;
		  override_linenum = 0;
		}
	    }
1930 1931 1932 1933 1934 1935
	  break;

	case NOTE_INSN_DELETED_LABEL:
	  /* Emit the label.  We may have deleted the CODE_LABEL because
	     the label could be proved to be unreachable, though still
	     referenced (in the form of having its address taken.  */
1936
	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1937
	  break;
1938

1939 1940 1941 1942
	case NOTE_INSN_VAR_LOCATION:
	  (*debug_hooks->var_location) (insn);
	  break;

1943
	default:
1944
	  gcc_unreachable ();
Kazu Hirata committed
1945
	  break;
1946 1947 1948 1949
	}
      break;

    case BARRIER:
1950
#if defined (DWARF2_UNWIND_INFO)
1951
      if (dwarf2out_do_frame ())
1952
	dwarf2out_frame_debug (insn, false);
1953
#endif
1954 1955 1956
      break;

    case CODE_LABEL:
1957 1958
      /* The target port might emit labels in the output function for
	 some insn, e.g. sh.c output_branchy_insn.  */
1959 1960 1961
      if (CODE_LABEL_NUMBER (insn) <= max_labelno)
	{
	  int align = LABEL_TO_ALIGNMENT (insn);
Kaveh R. Ghazi committed
1962
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1963
	  int max_skip = LABEL_TO_MAX_SKIP (insn);
Kaveh R. Ghazi committed
1964
#endif
1965

1966
	  if (align && NEXT_INSN (insn))
1967
	    {
1968
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1969
	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1970
#else
1971 1972 1973
#ifdef ASM_OUTPUT_ALIGN_WITH_NOP
              ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
#else
1974
	      ASM_OUTPUT_ALIGN (file, align);
1975
#endif
1976
#endif
1977
	    }
1978
	}
1979
#ifdef HAVE_cc0
1980
      CC_STATUS_INIT;
1981
#endif
1982

1983 1984 1985
      if (LABEL_NAME (insn))
	(*debug_hooks->label) (insn);

1986 1987
      if (app_on)
	{
Kaveh R. Ghazi committed
1988
	  fputs (ASM_APP_OFF, file);
1989 1990
	  app_on = 0;
	}
1991 1992 1993

      next = next_nonnote_insn (insn);
      if (next != 0 && JUMP_P (next))
1994
	{
1995
	  rtx nextbody = PATTERN (next);
1996 1997 1998 1999 2000 2001 2002 2003

	  /* If this label is followed by a jump-table,
	     make sure we put the label in the read-only section.  Also
	     possibly write the label and jump table together.  */

	  if (GET_CODE (nextbody) == ADDR_VEC
	      || GET_CODE (nextbody) == ADDR_DIFF_VEC)
	    {
2004 2005 2006 2007 2008
#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
	      /* In this case, the case vector is being moved by the
		 target, so don't output the label at all.  Leave that
		 to the back end macros.  */
#else
2009 2010
	      if (! JUMP_TABLES_IN_TEXT_SECTION)
		{
2011 2012
		  int log_align;

2013 2014
		  switch_to_section (targetm.asm_out.function_rodata_section
				     (current_function_decl));
2015 2016

#ifdef ADDR_VEC_ALIGN
2017
		  log_align = ADDR_VEC_ALIGN (next);
2018 2019 2020 2021
#else
		  log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif
		  ASM_OUTPUT_ALIGN (file, log_align);
2022 2023
		}
	      else
2024
		switch_to_section (current_function_section ());
2025

2026 2027
#ifdef ASM_OUTPUT_CASE_LABEL
	      ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2028
				     next);
2029
#else
2030
	      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2031
#endif
2032
#endif
2033 2034 2035
	      break;
	    }
	}
2036 2037
      if (LABEL_ALT_ENTRY_P (insn))
	output_alternate_entry_point (file, insn);
2038
      else
2039
	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2040 2041 2042 2043
      break;

    default:
      {
2044
	rtx body = PATTERN (insn);
2045
	int insn_code_number;
2046
	const char *template;
2047

2048 2049 2050 2051
#ifdef HAVE_conditional_execution
	/* Reset this early so it is correct for ASM statements.  */
	current_insn_predicate = NULL_RTX;
#endif
2052 2053 2054
	/* An INSN, JUMP_INSN or CALL_INSN.
	   First check for special kinds that recog doesn't recognize.  */

2055
	if (GET_CODE (body) == USE /* These are just declarations.  */
2056 2057 2058 2059
	    || GET_CODE (body) == CLOBBER)
	  break;

#ifdef HAVE_cc0
2060 2061 2062 2063 2064
	{
	  /* If there is a REG_CC_SETTER note on this insn, it means that
	     the setting of the condition code was done in the delay slot
	     of the insn that branched here.  So recover the cc status
	     from the insn that set it.  */
2065

2066 2067 2068 2069 2070 2071 2072
	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
	  if (note)
	    {
	      NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
	      cc_prev_status = cc_status;
	    }
	}
2073 2074 2075 2076 2077 2078 2079
#endif

	/* Detect insns that are really jump-tables
	   and output them as such.  */

	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	  {
Kaveh R. Ghazi committed
2080
#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2081
	    int vlen, idx;
Kaveh R. Ghazi committed
2082
#endif
2083

2084
	    if (! JUMP_TABLES_IN_TEXT_SECTION)
2085 2086
	      switch_to_section (targetm.asm_out.function_rodata_section
				 (current_function_decl));
2087
	    else
2088
	      switch_to_section (current_function_section ());
2089

2090 2091
	    if (app_on)
	      {
Kaveh R. Ghazi committed
2092
		fputs (ASM_APP_OFF, file);
2093 2094 2095
		app_on = 0;
	      }

2096 2097 2098 2099 2100 2101
#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
	    if (GET_CODE (body) == ADDR_VEC)
	      {
#ifdef ASM_OUTPUT_ADDR_VEC
		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
#else
2102
		gcc_unreachable ();
2103 2104 2105 2106 2107 2108 2109
#endif
	      }
	    else
	      {
#ifdef ASM_OUTPUT_ADDR_DIFF_VEC
		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
#else
2110
		gcc_unreachable ();
2111 2112 2113
#endif
	      }
#else
2114 2115 2116 2117 2118 2119 2120 2121 2122
	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
	    for (idx = 0; idx < vlen; idx++)
	      {
		if (GET_CODE (body) == ADDR_VEC)
		  {
#ifdef ASM_OUTPUT_ADDR_VEC_ELT
		    ASM_OUTPUT_ADDR_VEC_ELT
		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
#else
2123
		    gcc_unreachable ();
2124 2125 2126 2127 2128 2129 2130
#endif
		  }
		else
		  {
#ifdef ASM_OUTPUT_ADDR_DIFF_ELT
		    ASM_OUTPUT_ADDR_DIFF_ELT
		      (file,
2131
		       body,
2132 2133 2134
		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
#else
2135
		    gcc_unreachable ();
2136 2137 2138 2139 2140 2141 2142 2143
#endif
		  }
	      }
#ifdef ASM_OUTPUT_CASE_END
	    ASM_OUTPUT_CASE_END (file,
				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
				 insn);
#endif
2144
#endif
2145

2146
	    switch_to_section (current_function_section ());
2147 2148 2149

	    break;
	  }
2150 2151 2152 2153 2154 2155
	/* Output this line note if it is the first or the last line
	   note in a row.  */
	if (notice_source_line (insn))
	  {
	    (*debug_hooks->source_line) (last_linenum, last_filename);
	  }
2156 2157 2158

	if (GET_CODE (body) == ASM_INPUT)
	  {
2159 2160
	    const char *string = XSTR (body, 0);

2161 2162
	    /* There's no telling what that did to the condition codes.  */
	    CC_STATUS_INIT;
2163 2164

	    if (string[0])
2165
	      {
2166
		expanded_location loc;
2167

2168 2169 2170 2171 2172
		if (! app_on)
		  {
		    fputs (ASM_APP_ON, file);
		    app_on = 1;
		  }
2173
		loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2174
		if (*loc.file && loc.line)
2175 2176
		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
			   ASM_COMMENT_START, loc.line, loc.file);
2177
		fprintf (asm_out_file, "\t%s\n", string);
2178 2179
#if HAVE_AS_LINE_ZERO
		if (*loc.file && loc.line)
2180
		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2181
#endif
2182 2183 2184 2185 2186 2187 2188
	      }
	    break;
	  }

	/* Detect `asm' construct with operands.  */
	if (asm_noperands (body) >= 0)
	  {
2189
	    unsigned int noperands = asm_noperands (body);
2190
	    rtx *ops = alloca (noperands * sizeof (rtx));
2191
	    const char *string;
2192
	    location_t loc;
2193
	    expanded_location expanded;
2194 2195 2196 2197 2198

	    /* There's no telling what that did to the condition codes.  */
	    CC_STATUS_INIT;

	    /* Get out the operand values.  */
2199
	    string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2200
	    /* Inhibit dying on what would otherwise be compiler bugs.  */
2201 2202
	    insn_noperands = noperands;
	    this_is_asm_operands = insn;
2203
	    expanded = expand_location (loc);
2204

2205 2206 2207 2208
#ifdef FINAL_PRESCAN_INSN
	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
#endif

2209
	    /* Output the insn using them.  */
2210 2211 2212 2213 2214 2215 2216
	    if (string[0])
	      {
		if (! app_on)
		  {
		    fputs (ASM_APP_ON, file);
		    app_on = 1;
		  }
2217
		if (expanded.file && expanded.line)
2218
		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2219
			   ASM_COMMENT_START, expanded.line, expanded.file);
2220
	        output_asm_insn (string, ops);
2221
#if HAVE_AS_LINE_ZERO
2222
		if (expanded.file && expanded.line)
2223
		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2224
#endif
2225 2226
	      }

2227 2228 2229 2230
	    this_is_asm_operands = 0;
	    break;
	  }

2231
	if (app_on)
2232
	  {
Kaveh R. Ghazi committed
2233
	    fputs (ASM_APP_OFF, file);
2234 2235 2236 2237 2238 2239
	    app_on = 0;
	  }

	if (GET_CODE (body) == SEQUENCE)
	  {
	    /* A delayed-branch sequence */
2240
	    int i;
2241 2242 2243

	    final_sequence = body;

2244 2245 2246 2247 2248
	    /* Record the delay slots' frame information before the branch.
	       This is needed for delayed calls: see execute_cfa_program().  */
#if defined (DWARF2_UNWIND_INFO)
	    if (dwarf2out_do_frame ())
	      for (i = 1; i < XVECLEN (body, 0); i++)
2249
		dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2250 2251
#endif

2252 2253 2254 2255 2256
	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
	       force the restoration of a comparison that was previously
	       thought unnecessary.  If that happens, cancel this sequence
	       and cause that insn to be restored.  */

2257
	    next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2258 2259 2260 2261 2262 2263 2264
	    if (next != XVECEXP (body, 0, 1))
	      {
		final_sequence = 0;
		return next;
	      }

	    for (i = 1; i < XVECLEN (body, 0); i++)
2265 2266 2267 2268 2269 2270
	      {
		rtx insn = XVECEXP (body, 0, i);
		rtx next = NEXT_INSN (insn);
		/* We loop in case any instruction in a delay slot gets
		   split.  */
		do
2271
		  insn = final_scan_insn (insn, file, 0, 1, seen);
2272 2273
		while (insn != next);
	      }
2274 2275 2276 2277 2278 2279 2280 2281 2282 2283
#ifdef DBR_OUTPUT_SEQEND
	    DBR_OUTPUT_SEQEND (file);
#endif
	    final_sequence = 0;

	    /* If the insn requiring the delay slot was a CALL_INSN, the
	       insns in the delay slot are actually executed before the
	       called function.  Hence we don't preserve any CC-setting
	       actions in these insns and the CC must be marked as being
	       clobbered by the function.  */
2284
	    if (CALL_P (XVECEXP (body, 0, 0)))
2285 2286 2287
	      {
		CC_STATUS_INIT;
	      }
2288 2289 2290 2291 2292 2293 2294 2295
	    break;
	  }

	/* We have a real machine instruction as rtl.  */

	body = PATTERN (insn);

#ifdef HAVE_cc0
Kazu Hirata committed
2296
	set = single_set (insn);
2297

2298 2299 2300 2301 2302 2303 2304 2305
	/* Check for redundant test and compare instructions
	   (when the condition codes are already set up as desired).
	   This is done only when optimizing; if not optimizing,
	   it should be possible for the user to alter a variable
	   with the debugger in between statements
	   and the next statement should reexamine the variable
	   to compute the condition codes.  */

2306
	if (optimize)
2307
	  {
2308 2309 2310
	    if (set
		&& GET_CODE (SET_DEST (set)) == CC0
		&& insn != last_ignored_compare)
2311
	      {
2312
		if (GET_CODE (SET_SRC (set)) == SUBREG)
2313
		  SET_SRC (set) = alter_subreg (&SET_SRC (set));
2314 2315 2316 2317
		else if (GET_CODE (SET_SRC (set)) == COMPARE)
		  {
		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
		      XEXP (SET_SRC (set), 0)
2318
			= alter_subreg (&XEXP (SET_SRC (set), 0));
2319 2320
		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
		      XEXP (SET_SRC (set), 1)
2321
			= alter_subreg (&XEXP (SET_SRC (set), 1));
2322 2323 2324 2325 2326
		  }
		if ((cc_status.value1 != 0
		     && rtx_equal_p (SET_SRC (set), cc_status.value1))
		    || (cc_status.value2 != 0
			&& rtx_equal_p (SET_SRC (set), cc_status.value2)))
2327
		  {
2328
		    /* Don't delete insn if it has an addressing side-effect.  */
2329
		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2330 2331 2332 2333 2334 2335 2336
			/* or if anything in it is volatile.  */
			&& ! volatile_refs_p (PATTERN (insn)))
		      {
			/* We don't really delete the insn; just ignore it.  */
			last_ignored_compare = insn;
			break;
		      }
2337 2338 2339 2340 2341 2342 2343 2344 2345 2346 2347 2348
		  }
	      }
	  }
#endif

#ifdef HAVE_cc0
	/* If this is a conditional branch, maybe modify it
	   if the cc's are in a nonstandard state
	   so that it accomplishes the same thing that it would
	   do straightforwardly if the cc's were set up normally.  */

	if (cc_status.flags != 0
2349
	    && JUMP_P (insn)
2350 2351 2352
	    && GET_CODE (body) == SET
	    && SET_DEST (body) == pc_rtx
	    && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2353
	    && COMPARISON_P (XEXP (SET_SRC (body), 0))
2354
	    && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2355 2356 2357 2358 2359 2360
	  {
	    /* This function may alter the contents of its argument
	       and clear some of the cc_status.flags bits.
	       It may also return 1 meaning condition now always true
	       or -1 meaning condition now always false
	       or 2 meaning condition nontrivial but altered.  */
2361
	    int result = alter_cond (XEXP (SET_SRC (body), 0));
2362 2363 2364 2365 2366 2367 2368 2369 2370 2371 2372 2373
	    /* If condition now has fixed value, replace the IF_THEN_ELSE
	       with its then-operand or its else-operand.  */
	    if (result == 1)
	      SET_SRC (body) = XEXP (SET_SRC (body), 1);
	    if (result == -1)
	      SET_SRC (body) = XEXP (SET_SRC (body), 2);

	    /* The jump is now either unconditional or a no-op.
	       If it has become a no-op, don't try to output it.
	       (It would not be recognized.)  */
	    if (SET_SRC (body) == pc_rtx)
	      {
2374
	        delete_insn (insn);
2375 2376 2377 2378 2379 2380 2381 2382 2383 2384 2385
		break;
	      }
	    else if (GET_CODE (SET_SRC (body)) == RETURN)
	      /* Replace (set (pc) (return)) with (return).  */
	      PATTERN (insn) = body = SET_SRC (body);

	    /* Rerecognize the instruction if it has changed.  */
	    if (result != 0)
	      INSN_CODE (insn) = -1;
	  }

2386 2387 2388 2389 2390 2391 2392 2393 2394 2395 2396 2397 2398 2399 2400 2401 2402 2403 2404 2405 2406 2407 2408 2409 2410 2411 2412 2413 2414 2415 2416 2417 2418 2419 2420
	/* If this is a conditional trap, maybe modify it if the cc's
	   are in a nonstandard state so that it accomplishes the same
	   thing that it would do straightforwardly if the cc's were
	   set up normally.  */
	if (cc_status.flags != 0
	    && NONJUMP_INSN_P (insn)
	    && GET_CODE (body) == TRAP_IF
	    && COMPARISON_P (TRAP_CONDITION (body))
	    && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
	  {
	    /* This function may alter the contents of its argument
	       and clear some of the cc_status.flags bits.
	       It may also return 1 meaning condition now always true
	       or -1 meaning condition now always false
	       or 2 meaning condition nontrivial but altered.  */
	    int result = alter_cond (TRAP_CONDITION (body));

	    /* If TRAP_CONDITION has become always false, delete the
	       instruction.  */
	    if (result == -1)
	      {
		delete_insn (insn);
		break;
	      }

	    /* If TRAP_CONDITION has become always true, replace
	       TRAP_CONDITION with const_true_rtx.  */
	    if (result == 1)
	      TRAP_CONDITION (body) = const_true_rtx;

	    /* Rerecognize the instruction if it has changed.  */
	    if (result != 0)
	      INSN_CODE (insn) = -1;
	  }

2421
	/* Make same adjustments to instructions that examine the
2422 2423
	   condition codes without jumping and instructions that
	   handle conditional moves (if this machine has either one).  */
2424 2425

	if (cc_status.flags != 0
2426
	    && set != 0)
2427
	  {
2428
	    rtx cond_rtx, then_rtx, else_rtx;
Kazu Hirata committed
2429

2430
	    if (!JUMP_P (insn)
2431
		&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2432
	      {
2433 2434 2435
		cond_rtx = XEXP (SET_SRC (set), 0);
		then_rtx = XEXP (SET_SRC (set), 1);
		else_rtx = XEXP (SET_SRC (set), 2);
2436 2437 2438
	      }
	    else
	      {
2439
		cond_rtx = SET_SRC (set);
2440 2441 2442
		then_rtx = const_true_rtx;
		else_rtx = const0_rtx;
	      }
Kazu Hirata committed
2443

2444
	    switch (GET_CODE (cond_rtx))
2445 2446 2447 2448 2449 2450 2451 2452 2453 2454 2455 2456
	      {
	      case GTU:
	      case GT:
	      case LTU:
	      case LT:
	      case GEU:
	      case GE:
	      case LEU:
	      case LE:
	      case EQ:
	      case NE:
		{
2457
		  int result;
2458
		  if (XEXP (cond_rtx, 0) != cc0_rtx)
2459
		    break;
2460
		  result = alter_cond (cond_rtx);
2461
		  if (result == 1)
2462
		    validate_change (insn, &SET_SRC (set), then_rtx, 0);
2463
		  else if (result == -1)
2464
		    validate_change (insn, &SET_SRC (set), else_rtx, 0);
2465 2466
		  else if (result == 2)
		    INSN_CODE (insn) = -1;
2467
		  if (SET_DEST (set) == SET_SRC (set))
2468
		    delete_insn (insn);
2469
		}
2470 2471 2472 2473
		break;

	      default:
		break;
2474 2475
	      }
	  }
2476

2477 2478
#endif

2479
#ifdef HAVE_peephole
2480 2481 2482 2483 2484 2485 2486 2487 2488
	/* Do machine-specific peephole optimizations if desired.  */

	if (optimize && !flag_no_peephole && !nopeepholes)
	  {
	    rtx next = peephole (insn);
	    /* When peepholing, if there were notes within the peephole,
	       emit them before the peephole.  */
	    if (next != 0 && next != NEXT_INSN (insn))
	      {
2489
		rtx note, prev = PREV_INSN (insn);
2490 2491 2492

		for (note = NEXT_INSN (insn); note != next;
		     note = NEXT_INSN (note))
2493
		  final_scan_insn (note, file, optimize, nopeepholes, seen);
2494 2495 2496 2497 2498 2499 2500 2501 2502 2503 2504 2505

		/* Put the notes in the proper position for a later
		   rescan.  For example, the SH target can do this
		   when generating a far jump in a delayed branch
		   sequence.  */
		note = NEXT_INSN (insn);
		PREV_INSN (note) = prev;
		NEXT_INSN (prev) = note;
		NEXT_INSN (PREV_INSN (next)) = insn;
		PREV_INSN (insn) = PREV_INSN (next);
		NEXT_INSN (insn) = next;
		PREV_INSN (next) = insn;
2506 2507 2508 2509 2510
	      }

	    /* PEEPHOLE might have changed this.  */
	    body = PATTERN (insn);
	  }
2511
#endif
2512 2513 2514 2515 2516 2517 2518

	/* Try to recognize the instruction.
	   If successful, verify that the operands satisfy the
	   constraints for the instruction.  Crash if they don't,
	   since `reload' should have changed them so that they do.  */

	insn_code_number = recog_memoized (insn);
2519
	cleanup_subreg_operands (insn);
2520

Kazu Hirata committed
2521 2522 2523 2524 2525 2526 2527
	/* Dump the insn in the assembly for debugging.  */
	if (flag_dump_rtl_in_asm)
	  {
	    print_rtx_head = ASM_COMMENT_START;
	    print_rtl_single (asm_out_file, insn);
	    print_rtx_head = "";
	  }
2528

2529
	if (! constrain_operands_cached (1))
2530 2531 2532 2533 2534 2535
	  fatal_insn_not_found (insn);

	/* Some target machines need to prescan each insn before
	   it is output.  */

#ifdef FINAL_PRESCAN_INSN
2536
	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2537 2538
#endif

2539 2540 2541 2542 2543
#ifdef HAVE_conditional_execution
	if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
#endif

2544 2545 2546 2547 2548 2549 2550 2551 2552 2553 2554
#ifdef HAVE_cc0
	cc_prev_status = cc_status;

	/* Update `cc_status' for this instruction.
	   The instruction's output routine may change it further.
	   If the output routine for a jump insn needs to depend
	   on the cc status, it should look at cc_prev_status.  */

	NOTICE_UPDATE_CC (body, insn);
#endif

2555
	current_output_insn = debug_insn = insn;
2556

2557
#if defined (DWARF2_UNWIND_INFO)
2558
	if (CALL_P (insn) && dwarf2out_do_frame ())
2559
	  dwarf2out_frame_debug (insn, false);
2560 2561
#endif

2562 2563
	/* Find the proper template for this insn.  */
	template = get_insn_template (insn_code_number, insn);
2564

2565 2566 2567
	/* If the C code returns 0, it means that it is a jump insn
	   which follows a deleted test insn, and that test insn
	   needs to be reinserted.  */
2568 2569
	if (template == 0)
	  {
2570 2571
	    rtx prev;

2572
	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2573 2574 2575 2576 2577 2578 2579 2580 2581

	    /* We have already processed the notes between the setter and
	       the user.  Make sure we don't process them again, this is
	       particularly important if one of the notes is a block
	       scope note or an EH note.  */
	    for (prev = insn;
		 prev != last_ignored_compare;
		 prev = PREV_INSN (prev))
	      {
2582
		if (NOTE_P (prev))
2583
		  delete_insn (prev);	/* Use delete_note.  */
2584 2585 2586
	      }

	    return prev;
2587 2588 2589 2590 2591 2592 2593 2594 2595 2596
	  }

	/* If the template is the string "#", it means that this insn must
	   be split.  */
	if (template[0] == '#' && template[1] == '\0')
	  {
	    rtx new = try_split (body, insn, 0);

	    /* If we didn't split the insn, go away.  */
	    if (new == insn && PATTERN (new) == body)
2597
	      fatal_insn ("could not split insn", insn);
Kazu Hirata committed
2598

2599 2600 2601 2602
#ifdef HAVE_ATTR_length
	    /* This instruction should have been split in shorten_branches,
	       to ensure that we would have valid length info for the
	       splitees.  */
2603
	    gcc_unreachable ();
2604 2605
#endif

2606 2607
	    return new;
	  }
Kazu Hirata committed
2608

2609 2610 2611 2612 2613
#ifdef TARGET_UNWIND_INFO
	/* ??? This will put the directives in the wrong place if
	   get_insn_template outputs assembly directly.  However calling it
	   before get_insn_template breaks if the insns is split.  */
	targetm.asm_out.unwind_emit (asm_out_file, insn);
2614
#endif
2615

2616
	/* Output assembler code from the template.  */
2617
	output_asm_insn (template, recog_data.operand);
2618

2619 2620 2621
	/* If necessary, report the effect that the instruction has on
	   the unwind info.   We've already done this for delay slots
	   and call instructions.  */
Jason Merrill committed
2622
#if defined (DWARF2_UNWIND_INFO)
2623
	if (final_sequence == 0
2624 2625 2626
#if !defined (HAVE_prologue)
	    && !ACCUMULATE_OUTGOING_ARGS
#endif
2627
	    && dwarf2out_do_frame ())
2628
	  dwarf2out_frame_debug (insn, true);
Jason Merrill committed
2629
#endif
x  
Jason Merrill committed
2630

2631
	current_output_insn = debug_insn = 0;
2632 2633 2634 2635 2636
      }
    }
  return NEXT_INSN (insn);
}

2637
/* Return whether a source line note needs to be emitted before INSN.  */
2638

2639
static bool
2640
notice_source_line (rtx insn)
2641
{
2642 2643 2644 2645 2646 2647 2648 2649 2650 2651 2652 2653 2654
  const char *filename;
  int linenum;

  if (override_filename)
    {
      filename = override_filename;
      linenum = override_linenum;
    }
  else
    {
      filename = insn_file (insn);
      linenum = insn_line (insn);
    }
2655

2656 2657 2658 2659
  if (filename
      && (force_source_line
	  || filename != last_filename
	  || last_linenum != linenum))
2660
    {
2661
      force_source_line = false;
2662 2663 2664 2665 2666 2667 2668
      last_filename = filename;
      last_linenum = linenum;
      high_block_linenum = MAX (last_linenum, high_block_linenum);
      high_function_linenum = MAX (last_linenum, high_function_linenum);
      return true;
    }
  return false;
2669 2670
}

2671 2672
/* For each operand in INSN, simplify (subreg (reg)) so that it refers
   directly to the desired hard register.  */
Kazu Hirata committed
2673

2674
void
2675
cleanup_subreg_operands (rtx insn)
2676
{
2677
  int i;
2678
  bool changed = false;
2679
  extract_insn_cached (insn);
2680
  for (i = 0; i < recog_data.n_operands; i++)
2681
    {
2682
      /* The following test cannot use recog_data.operand when testing
2683 2684 2685 2686 2687
	 for a SUBREG: the underlying object might have been changed
	 already if we are inside a match_operator expression that
	 matches the else clause.  Instead we test the underlying
	 expression directly.  */
      if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2688 2689 2690 2691
	{
	  recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
	  changed = true;
	}
2692
      else if (GET_CODE (recog_data.operand[i]) == PLUS
2693
	       || GET_CODE (recog_data.operand[i]) == MULT
2694
	       || MEM_P (recog_data.operand[i]))
2695
	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2696 2697
    }

2698
  for (i = 0; i < recog_data.n_dups; i++)
2699
    {
2700
      if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2701 2702 2703 2704
	{
	  *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
	  changed = true;
	}
2705
      else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2706
	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
2707
	       || MEM_P (*recog_data.dup_loc[i]))
2708
	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2709
    }
2710 2711
  if (changed)
    df_insn_rescan (insn);
2712 2713
}

2714 2715 2716 2717
/* If X is a SUBREG, replace it with a REG or a MEM,
   based on the thing it is a subreg of.  */

rtx
2718
alter_subreg (rtx *xp)
2719
{
2720
  rtx x = *xp;
2721
  rtx y = SUBREG_REG (x);
2722

2723 2724
  /* simplify_subreg does not remove subreg from volatile references.
     We are required to.  */
2725
  if (MEM_P (y))
2726 2727 2728 2729 2730 2731 2732 2733 2734 2735 2736 2737 2738 2739 2740 2741 2742 2743
    {
      int offset = SUBREG_BYTE (x);

      /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
	 contains 0 instead of the proper offset.  See simplify_subreg.  */
      if (offset == 0
	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
        {
          int difference = GET_MODE_SIZE (GET_MODE (y))
			   - GET_MODE_SIZE (GET_MODE (x));
          if (WORDS_BIG_ENDIAN)
            offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += difference % UNITS_PER_WORD;
        }

      *xp = adjust_address (y, GET_MODE (x), offset);
    }
2744
  else
2745 2746 2747 2748 2749 2750
    {
      rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
				 SUBREG_BYTE (x));

      if (new != 0)
	*xp = new;
2751
      else if (REG_P (y))
2752
	{
2753
	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
2754 2755 2756 2757 2758 2759 2760 2761 2762
	  unsigned int regno;
	  HOST_WIDE_INT offset;

	  regno = subreg_regno (x);
	  if (subreg_lowpart_p (x))
	    offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
	  else
	    offset = SUBREG_BYTE (x);
	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
2763 2764 2765
	}
    }

2766
  return *xp;
2767 2768 2769 2770 2771
}

/* Do alter_subreg on all the SUBREGs contained in X.  */

static rtx
2772
walk_alter_subreg (rtx *xp, bool *changed)
2773
{
2774
  rtx x = *xp;
2775 2776 2777 2778
  switch (GET_CODE (x))
    {
    case PLUS:
    case MULT:
2779
    case AND:
2780 2781
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
      XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
2782 2783 2784
      break;

    case MEM:
2785
    case ZERO_EXTEND:
2786
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2787 2788 2789
      break;

    case SUBREG:
2790
      *changed = true;
2791
      return alter_subreg (xp);
Kazu Hirata committed
2792

2793 2794
    default:
      break;
2795 2796
    }

2797
  return *xp;
2798 2799 2800 2801 2802 2803 2804 2805 2806 2807 2808 2809 2810 2811
}

#ifdef HAVE_cc0

/* Given BODY, the body of a jump instruction, alter the jump condition
   as required by the bits that are set in cc_status.flags.
   Not all of the bits there can be handled at this level in all cases.

   The value is normally 0.
   1 means that the condition has become always true.
   -1 means that the condition has become always false.
   2 means that COND has been altered.  */

static int
2812
alter_cond (rtx cond)
2813 2814 2815 2816 2817 2818 2819 2820 2821 2822 2823 2824 2825 2826 2827 2828 2829 2830 2831 2832 2833 2834 2835 2836 2837 2838 2839 2840 2841 2842 2843 2844 2845 2846 2847 2848 2849 2850 2851
{
  int value = 0;

  if (cc_status.flags & CC_REVERSED)
    {
      value = 2;
      PUT_CODE (cond, swap_condition (GET_CODE (cond)));
    }

  if (cc_status.flags & CC_INVERTED)
    {
      value = 2;
      PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
    }

  if (cc_status.flags & CC_NOT_POSITIVE)
    switch (GET_CODE (cond))
      {
      case LE:
      case LEU:
      case GEU:
	/* Jump becomes unconditional.  */
	return 1;

      case GT:
      case GTU:
      case LTU:
	/* Jump becomes no-op.  */
	return -1;

      case GE:
	PUT_CODE (cond, EQ);
	value = 2;
	break;

      case LT:
	PUT_CODE (cond, NE);
	value = 2;
	break;
Kazu Hirata committed
2852

2853 2854
      default:
	break;
2855 2856 2857 2858 2859 2860 2861 2862 2863 2864 2865 2866 2867 2868 2869 2870 2871 2872 2873 2874 2875 2876 2877 2878 2879 2880
      }

  if (cc_status.flags & CC_NOT_NEGATIVE)
    switch (GET_CODE (cond))
      {
      case GE:
      case GEU:
	/* Jump becomes unconditional.  */
	return 1;

      case LT:
      case LTU:
	/* Jump becomes no-op.  */
	return -1;

      case LE:
      case LEU:
	PUT_CODE (cond, EQ);
	value = 2;
	break;

      case GT:
      case GTU:
	PUT_CODE (cond, NE);
	value = 2;
	break;
Kazu Hirata committed
2881

2882 2883
      default:
	break;
2884 2885 2886 2887 2888 2889 2890 2891 2892 2893 2894 2895 2896 2897 2898 2899 2900 2901 2902 2903 2904 2905
      }

  if (cc_status.flags & CC_NO_OVERFLOW)
    switch (GET_CODE (cond))
      {
      case GEU:
	/* Jump becomes unconditional.  */
	return 1;

      case LEU:
	PUT_CODE (cond, EQ);
	value = 2;
	break;

      case GTU:
	PUT_CODE (cond, NE);
	value = 2;
	break;

      case LTU:
	/* Jump becomes no-op.  */
	return -1;
Kazu Hirata committed
2906

2907 2908
      default:
	break;
2909 2910 2911 2912 2913
      }

  if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
    switch (GET_CODE (cond))
      {
2914
      default:
2915
	gcc_unreachable ();
2916 2917 2918 2919 2920 2921 2922 2923 2924 2925 2926 2927 2928 2929 2930 2931 2932 2933 2934 2935 2936 2937 2938 2939 2940 2941 2942 2943 2944 2945 2946 2947 2948 2949 2950 2951

      case NE:
	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
	value = 2;
	break;

      case EQ:
	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
	value = 2;
	break;
      }

  if (cc_status.flags & CC_NOT_SIGNED)
    /* The flags are valid if signed condition operators are converted
       to unsigned.  */
    switch (GET_CODE (cond))
      {
      case LE:
	PUT_CODE (cond, LEU);
	value = 2;
	break;

      case LT:
	PUT_CODE (cond, LTU);
	value = 2;
	break;

      case GT:
	PUT_CODE (cond, GTU);
	value = 2;
	break;

      case GE:
	PUT_CODE (cond, GEU);
	value = 2;
	break;
2952 2953 2954

      default:
	break;
2955 2956 2957 2958 2959 2960 2961 2962 2963 2964
      }

  return value;
}
#endif

/* Report inconsistency between the assembler template and the operands.
   In an `asm', it's the user's fault; otherwise, the compiler's fault.  */

void
2965
output_operand_lossage (const char *cmsgid, ...)
2966
{
2967 2968
  char *fmt_string;
  char *new_message;
2969
  const char *pfx_str;
2970
  va_list ap;
2971

2972
  va_start (ap, cmsgid);
2973

2974
  pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
2975
  asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
2976
  vasprintf (&new_message, fmt_string, ap);
Kazu Hirata committed
2977

2978
  if (this_is_asm_operands)
2979
    error_for_asm (this_is_asm_operands, "%s", new_message);
2980
  else
2981 2982 2983 2984
    internal_error ("%s", new_message);

  free (fmt_string);
  free (new_message);
2985
  va_end (ap);
2986 2987 2988 2989
}

/* Output of assembler code from a template, and its subroutines.  */

2990 2991 2992 2993
/* Annotate the assembly with a comment describing the pattern and
   alternative used.  */

static void
2994
output_asm_name (void)
2995 2996 2997 2998 2999 3000 3001 3002 3003 3004 3005 3006 3007 3008 3009 3010 3011 3012 3013
{
  if (debug_insn)
    {
      int num = INSN_CODE (debug_insn);
      fprintf (asm_out_file, "\t%s %d\t%s",
	       ASM_COMMENT_START, INSN_UID (debug_insn),
	       insn_data[num].name);
      if (insn_data[num].n_alternatives > 1)
	fprintf (asm_out_file, "/%d", which_alternative + 1);
#ifdef HAVE_ATTR_length
      fprintf (asm_out_file, "\t[length = %d]",
	       get_attr_length (debug_insn));
#endif
      /* Clear this so only the first assembler insn
	 of any rtl insn will get the special comment for -dp.  */
      debug_insn = 0;
    }
}

3014 3015
/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
   or its address, return that expr .  Set *PADDRESSP to 1 if the expr
3016 3017 3018
   corresponds to the address of the object and 0 if to the object.  */

static tree
3019
get_mem_expr_from_op (rtx op, int *paddressp)
3020
{
3021
  tree expr;
3022 3023 3024 3025
  int inner_addressp;

  *paddressp = 0;

3026
  if (REG_P (op))
3027
    return REG_EXPR (op);
3028
  else if (!MEM_P (op))
3029 3030
    return 0;

3031 3032
  if (MEM_EXPR (op) != 0)
    return MEM_EXPR (op);
3033 3034 3035 3036 3037 3038 3039 3040

  /* Otherwise we have an address, so indicate it and look at the address.  */
  *paddressp = 1;
  op = XEXP (op, 0);

  /* First check if we have a decl for the address, then look at the right side
     if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
     But don't allow the address to itself be indirect.  */
3041 3042
  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
    return expr;
3043
  else if (GET_CODE (op) == PLUS
3044 3045
	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
    return expr;
3046

3047 3048
  while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3049 3050
    op = XEXP (op, 0);

3051 3052
  expr = get_mem_expr_from_op (op, &inner_addressp);
  return inner_addressp ? 0 : expr;
3053
}
3054

3055 3056 3057 3058 3059
/* Output operand names for assembler instructions.  OPERANDS is the
   operand vector, OPORDER is the order to write the operands, and NOPS
   is the number of operands to write.  */

static void
3060
output_asm_operand_names (rtx *operands, int *oporder, int nops)
3061 3062 3063 3064 3065 3066 3067
{
  int wrote = 0;
  int i;

  for (i = 0; i < nops; i++)
    {
      int addressp;
3068 3069
      rtx op = operands[oporder[i]];
      tree expr = get_mem_expr_from_op (op, &addressp);
3070

3071 3072 3073
      fprintf (asm_out_file, "%c%s",
	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
      wrote = 1;
3074
      if (expr)
3075
	{
3076
	  fprintf (asm_out_file, "%s",
3077 3078
		   addressp ? "*" : "");
	  print_mem_expr (asm_out_file, expr);
3079 3080
	  wrote = 1;
	}
3081 3082 3083
      else if (REG_P (op) && ORIGINAL_REGNO (op)
	       && ORIGINAL_REGNO (op) != REGNO (op))
	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3084 3085 3086
    }
}

3087 3088 3089 3090 3091 3092 3093 3094 3095 3096 3097 3098 3099 3100 3101 3102 3103
/* Output text from TEMPLATE to the assembler output file,
   obeying %-directions to substitute operands taken from
   the vector OPERANDS.

   %N (for N a digit) means print operand N in usual manner.
   %lN means require operand N to be a CODE_LABEL or LABEL_REF
      and print the label name with no punctuation.
   %cN means require operand N to be a constant
      and print the constant expression with no punctuation.
   %aN means expect operand N to be a memory address
      (not a memory reference!) and print a reference
      to that address.
   %nN means expect operand N to be a constant
      and print a constant expression for minus the value
      of the operand, with no other punctuation.  */

void
3104
output_asm_insn (const char *template, rtx *operands)
3105
{
3106 3107
  const char *p;
  int c;
3108 3109 3110
#ifdef ASSEMBLER_DIALECT
  int dialect = 0;
#endif
3111
  int oporder[MAX_RECOG_OPERANDS];
3112
  char opoutput[MAX_RECOG_OPERANDS];
3113
  int ops = 0;
3114 3115 3116 3117 3118 3119

  /* An insn may return a null string template
     in a case where no assembler code is needed.  */
  if (*template == 0)
    return;

3120
  memset (opoutput, 0, sizeof opoutput);
3121 3122 3123 3124 3125 3126 3127
  p = template;
  putc ('\t', asm_out_file);

#ifdef ASM_OUTPUT_OPCODE
  ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif

3128
  while ((c = *p++))
3129 3130 3131
    switch (c)
      {
      case '\n':
3132 3133
	if (flag_verbose_asm)
	  output_asm_operand_names (operands, oporder, ops);
3134 3135 3136
	if (flag_print_asm_name)
	  output_asm_name ();

3137 3138 3139
	ops = 0;
	memset (opoutput, 0, sizeof opoutput);

3140
	putc (c, asm_out_file);
3141
#ifdef ASM_OUTPUT_OPCODE
3142 3143 3144 3145 3146 3147 3148
	while ((c = *p) == '\t')
	  {
	    putc (c, asm_out_file);
	    p++;
	  }
	ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif
3149
	break;
3150 3151 3152

#ifdef ASSEMBLER_DIALECT
      case '{':
3153
	{
3154
	  int i;
Kazu Hirata committed
3155

3156 3157 3158 3159 3160
	  if (dialect)
	    output_operand_lossage ("nested assembly dialect alternatives");
	  else
	    dialect = 1;

3161 3162 3163 3164
	  /* If we want the first dialect, do nothing.  Otherwise, skip
	     DIALECT_NUMBER of strings ending with '|'.  */
	  for (i = 0; i < dialect_number; i++)
	    {
3165
	      while (*p && *p != '}' && *p++ != '|')
3166
		;
3167 3168
	      if (*p == '}')
		break;
3169 3170 3171
	      if (*p == '|')
		p++;
	    }
3172 3173 3174

	  if (*p == '\0')
	    output_operand_lossage ("unterminated assembly dialect alternative");
3175
	}
3176 3177 3178
	break;

      case '|':
3179 3180 3181 3182 3183 3184 3185 3186 3187 3188
	if (dialect)
	  {
	    /* Skip to close brace.  */
	    do
	      {
		if (*p == '\0')
		  {
		    output_operand_lossage ("unterminated assembly dialect alternative");
		    break;
		  }
3189
	      }
3190 3191 3192 3193 3194
	    while (*p++ != '}');
	    dialect = 0;
	  }
	else
	  putc (c, asm_out_file);
3195 3196 3197
	break;

      case '}':
3198 3199 3200
	if (! dialect)
	  putc (c, asm_out_file);
	dialect = 0;
3201 3202 3203 3204 3205 3206 3207 3208 3209 3210 3211 3212 3213 3214 3215 3216 3217 3218 3219 3220 3221 3222 3223
	break;
#endif

      case '%':
	/* %% outputs a single %.  */
	if (*p == '%')
	  {
	    p++;
	    putc (c, asm_out_file);
	  }
	/* %= outputs a number which is unique to each insn in the entire
	   compilation.  This is useful for making local labels that are
	   referred to more than once in a given insn.  */
	else if (*p == '=')
	  {
	    p++;
	    fprintf (asm_out_file, "%d", insn_counter);
	  }
	/* % followed by a letter and some digits
	   outputs an operand in a special way depending on the letter.
	   Letters `acln' are implemented directly.
	   Other letters are passed to `output_operand' so that
	   the PRINT_OPERAND macro can define them.  */
3224
	else if (ISALPHA (*p))
3225 3226
	  {
	    int letter = *p++;
3227 3228
	    unsigned long opnum;
	    char *endptr;
3229

3230 3231 3232 3233 3234 3235
	    opnum = strtoul (p, &endptr, 10);

	    if (endptr == p)
	      output_operand_lossage ("operand number missing "
				      "after %%-letter");
	    else if (this_is_asm_operands && opnum >= insn_noperands)
3236 3237
	      output_operand_lossage ("operand number out of range");
	    else if (letter == 'l')
3238
	      output_asm_label (operands[opnum]);
3239
	    else if (letter == 'a')
3240
	      output_address (operands[opnum]);
3241 3242
	    else if (letter == 'c')
	      {
3243 3244
		if (CONSTANT_ADDRESS_P (operands[opnum]))
		  output_addr_const (asm_out_file, operands[opnum]);
3245
		else
3246
		  output_operand (operands[opnum], 'c');
3247 3248 3249
	      }
	    else if (letter == 'n')
	      {
3250
		if (GET_CODE (operands[opnum]) == CONST_INT)
3251
		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3252
			   - INTVAL (operands[opnum]));
3253 3254 3255
		else
		  {
		    putc ('-', asm_out_file);
3256
		    output_addr_const (asm_out_file, operands[opnum]);
3257 3258 3259
		  }
	      }
	    else
3260
	      output_operand (operands[opnum], letter);
Kazu Hirata committed
3261

3262
	    if (!opoutput[opnum])
3263
	      oporder[ops++] = opnum;
3264
	    opoutput[opnum] = 1;
3265

3266 3267
	    p = endptr;
	    c = *p;
3268 3269
	  }
	/* % followed by a digit outputs an operand the default way.  */
3270
	else if (ISDIGIT (*p))
3271
	  {
3272 3273
	    unsigned long opnum;
	    char *endptr;
3274

3275 3276
	    opnum = strtoul (p, &endptr, 10);
	    if (this_is_asm_operands && opnum >= insn_noperands)
3277 3278
	      output_operand_lossage ("operand number out of range");
	    else
3279
	      output_operand (operands[opnum], 0);
3280

3281
	    if (!opoutput[opnum])
3282
	      oporder[ops++] = opnum;
3283
	    opoutput[opnum] = 1;
3284

3285 3286
	    p = endptr;
	    c = *p;
3287 3288 3289 3290 3291
	  }
	/* % followed by punctuation: output something for that
	   punctuation character alone, with no operand.
	   The PRINT_OPERAND macro decides what is actually done.  */
#ifdef PRINT_OPERAND_PUNCT_VALID_P
Kazu Hirata committed
3292
	else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3293 3294 3295 3296 3297 3298 3299 3300 3301 3302
	  output_operand (NULL_RTX, *p++);
#endif
	else
	  output_operand_lossage ("invalid %%-code");
	break;

      default:
	putc (c, asm_out_file);
      }

3303 3304
  /* Write out the variable names for operands, if we know them.  */
  if (flag_verbose_asm)
3305
    output_asm_operand_names (operands, oporder, ops);
3306 3307
  if (flag_print_asm_name)
    output_asm_name ();
3308 3309 3310 3311 3312 3313 3314

  putc ('\n', asm_out_file);
}

/* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */

void
3315
output_asm_label (rtx x)
3316 3317 3318 3319
{
  char buf[256];

  if (GET_CODE (x) == LABEL_REF)
3320
    x = XEXP (x, 0);
3321 3322
  if (LABEL_P (x)
      || (NOTE_P (x)
3323
	  && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3324 3325
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
  else
3326
    output_operand_lossage ("'%%l' operand isn't a label");
3327 3328 3329 3330 3331 3332 3333 3334 3335 3336 3337 3338 3339 3340 3341

  assemble_name (asm_out_file, buf);
}

/* Print operand X using machine-dependent assembler syntax.
   The macro PRINT_OPERAND is defined just to control this function.
   CODE is a non-digit that preceded the operand-number in the % spec,
   such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
   between the % and the digits.
   When CODE is a non-letter, X is 0.

   The meanings of the letters are machine-dependent and controlled
   by PRINT_OPERAND.  */

static void
3342
output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3343 3344
{
  if (x && GET_CODE (x) == SUBREG)
3345
    x = alter_subreg (&x);
3346

3347
  /* X must not be a pseudo reg.  */
3348
  gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3349 3350 3351 3352 3353 3354 3355 3356 3357

  PRINT_OPERAND (asm_out_file, x, code);
}

/* Print a memory reference operand for address X
   using machine-dependent assembler syntax.
   The macro PRINT_OPERAND_ADDRESS exists just to control this function.  */

void
3358
output_address (rtx x)
3359
{
3360 3361
  bool changed = false;
  walk_alter_subreg (&x, &changed);
3362 3363 3364 3365 3366 3367 3368 3369
  PRINT_OPERAND_ADDRESS (asm_out_file, x);
}

/* Print an integer constant expression in assembler syntax.
   Addition and subtraction are the only arithmetic
   that may appear in these expressions.  */

void
3370
output_addr_const (FILE *file, rtx x)
3371 3372 3373 3374 3375 3376 3377
{
  char buf[256];

 restart:
  switch (GET_CODE (x))
    {
    case PC:
3378
      putc ('.', file);
3379 3380 3381
      break;

    case SYMBOL_REF:
3382 3383
      if (SYMBOL_REF_DECL (x))
	mark_decl_referenced (SYMBOL_REF_DECL (x));
3384 3385 3386
#ifdef ASM_OUTPUT_SYMBOL_REF
      ASM_OUTPUT_SYMBOL_REF (file, x);
#else
3387
      assemble_name (file, XSTR (x, 0));
3388
#endif
3389 3390 3391
      break;

    case LABEL_REF:
3392 3393
      x = XEXP (x, 0);
      /* Fall through.  */
3394 3395
    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3396 3397 3398
#ifdef ASM_OUTPUT_LABEL_REF
      ASM_OUTPUT_LABEL_REF (file, buf);
#else
3399
      assemble_name (file, buf);
3400
#endif
3401 3402 3403
      break;

    case CONST_INT:
3404
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3405 3406 3407 3408 3409 3410 3411 3412 3413 3414 3415 3416 3417
      break;

    case CONST:
      /* This used to output parentheses around the expression,
	 but that does not work on the 386 (either ATT or BSD assembler).  */
      output_addr_const (file, XEXP (x, 0));
      break;

    case CONST_DOUBLE:
      if (GET_MODE (x) == VOIDmode)
	{
	  /* We can use %d if the number is one word and positive.  */
	  if (CONST_DOUBLE_HIGH (x))
3418
	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3419 3420
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
Kazu Hirata committed
3421
	  else if (CONST_DOUBLE_LOW (x) < 0)
3422 3423
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3424
	  else
3425
	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3426 3427 3428 3429 3430 3431 3432
	}
      else
	/* We can't handle floating point constants;
	   PRINT_OPERAND must handle them.  */
	output_operand_lossage ("floating constant misused");
      break;

3433
    case CONST_FIXED:
3434 3435
      fprintf (file, HOST_WIDE_INT_PRINT_HEX,
	       (unsigned HOST_WIDE_INT) CONST_FIXED_VALUE_LOW (x));
3436 3437
      break;

3438 3439 3440 3441 3442 3443 3444 3445 3446 3447 3448 3449
    case PLUS:
      /* Some assemblers need integer constants to appear last (eg masm).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	{
	  output_addr_const (file, XEXP (x, 1));
	  if (INTVAL (XEXP (x, 0)) >= 0)
	    fprintf (file, "+");
	  output_addr_const (file, XEXP (x, 0));
	}
      else
	{
	  output_addr_const (file, XEXP (x, 0));
3450 3451
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
	      || INTVAL (XEXP (x, 1)) >= 0)
3452 3453 3454 3455 3456 3457 3458 3459 3460 3461 3462 3463 3464 3465
	    fprintf (file, "+");
	  output_addr_const (file, XEXP (x, 1));
	}
      break;

    case MINUS:
      /* Avoid outputting things like x-x or x+5-x,
	 since some assemblers can't handle that.  */
      x = simplify_subtraction (x);
      if (GET_CODE (x) != MINUS)
	goto restart;

      output_addr_const (file, XEXP (x, 0));
      fprintf (file, "-");
3466 3467 3468 3469 3470
      if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
	  || GET_CODE (XEXP (x, 1)) == PC
	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
	output_addr_const (file, XEXP (x, 1));
      else
3471
	{
3472
	  fputs (targetm.asm_out.open_paren, file);
3473
	  output_addr_const (file, XEXP (x, 1));
3474
	  fputs (targetm.asm_out.close_paren, file);
3475 3476 3477 3478 3479
	}
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
3480
    case SUBREG:
3481
    case TRUNCATE:
3482 3483 3484 3485
      output_addr_const (file, XEXP (x, 0));
      break;

    default:
3486 3487 3488 3489 3490 3491
#ifdef OUTPUT_ADDR_CONST_EXTRA
      OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
      break;

    fail:
#endif
3492 3493 3494 3495 3496 3497 3498 3499 3500 3501
      output_operand_lossage ("invalid expression as operand");
    }
}

/* A poor man's fprintf, with the added features of %I, %R, %L, %U and %O.
   %R prints the value of REGISTER_PREFIX.
   %L prints the value of LOCAL_LABEL_PREFIX.
   %U prints the value of USER_LABEL_PREFIX.
   %I prints the value of IMMEDIATE_PREFIX.
   %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
   Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.

   We handle alternate assembler dialects here, just like output_asm_insn.  */
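
/* As a purely illustrative example (the opcode and operand are made up),
   a back end might write

     asm_fprintf (file, "\tpush\t%R%s\n", reg_names[regno]);

   which emits the target's REGISTER_PREFIX (for instance "%") in front of
   the register name.  */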

void
asm_fprintf (FILE *file, const char *p, ...)
{
  char buf[10];
  char *q, c;
  va_list argptr;

  va_start (argptr, p);

  buf[0] = '%';

  while ((c = *p++))
    switch (c)
      {
#ifdef ASSEMBLER_DIALECT
      case '{':
	{
	  int i;

	  /* If we want the first dialect, do nothing.  Otherwise, skip
	     DIALECT_NUMBER of strings ending with '|'.  */
	  for (i = 0; i < dialect_number; i++)
	    {
	      while (*p && *p++ != '|')
		;

	      if (*p == '|')
		p++;
	    }
	}
	break;

      case '|':
	/* Skip to close brace.  */
	while (*p && *p++ != '}')
	  ;
	break;

      case '}':
	break;
#endif

      case '%':
	c = *p++;
	q = &buf[1];
	while (strchr ("-+ #0", c))
	  {
	    *q++ = c;
	    c = *p++;
	  }
	while (ISDIGIT (c) || c == '.')
	  {
	    *q++ = c;
	    c = *p++;
	  }
	switch (c)
	  {
	  case '%':
	    putc ('%', file);
	    break;

	  case 'd':  case 'i':  case 'u':
	  case 'x':  case 'X':  case 'o':
	  case 'c':
	    *q++ = c;
	    *q = 0;
	    fprintf (file, buf, va_arg (argptr, int));
	    break;

	  case 'w':
	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
	       'o' cases, but we do not check for those cases.  It
	       means that the value is a HOST_WIDE_INT, which may be
	       either `long' or `long long'.  */
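	    /* For example, "%wd" prints a HOST_WIDE_INT argument in decimal;
	       the host's length modifier is spliced in just below
	       (illustrative note only).  */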
	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
	    q += strlen (HOST_WIDE_INT_PRINT);
	    *q++ = *p++;
	    *q = 0;
	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
	    break;

	  case 'l':
	    *q++ = c;
#ifdef HAVE_LONG_LONG
	    if (*p == 'l')
	      {
		*q++ = *p++;
		*q++ = *p++;
		*q = 0;
		fprintf (file, buf, va_arg (argptr, long long));
	      }
	    else
#endif
	      {
		*q++ = *p++;
		*q = 0;
		fprintf (file, buf, va_arg (argptr, long));
	      }

	    break;

	  case 's':
	    *q++ = c;
	    *q = 0;
	    fprintf (file, buf, va_arg (argptr, char *));
	    break;

	  case 'O':
#ifdef ASM_OUTPUT_OPCODE
	    ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif
	    break;

	  case 'R':
#ifdef REGISTER_PREFIX
	    fprintf (file, "%s", REGISTER_PREFIX);
#endif
	    break;

	  case 'I':
#ifdef IMMEDIATE_PREFIX
	    fprintf (file, "%s", IMMEDIATE_PREFIX);
#endif
	    break;

	  case 'L':
#ifdef LOCAL_LABEL_PREFIX
	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
#endif
	    break;

	  case 'U':
	    fputs (user_label_prefix, file);
	    break;

#ifdef ASM_FPRINTF_EXTENSIONS
	    /* Uppercase letters are reserved for general use by asm_fprintf
	       and so are not available to target-specific code.  To prevent
	       the ASM_FPRINTF_EXTENSIONS macro from using them, they are
	       listed here.  As they get turned into real extensions to
	       asm_fprintf they should be removed from this list.  */
	  case 'A': case 'B': case 'C': case 'D': case 'E':
	  case 'F': case 'G': case 'H': case 'J': case 'K':
	  case 'M': case 'N': case 'P': case 'Q': case 'S':
	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
	    break;

	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
#endif
	  default:
	    gcc_unreachable ();
	  }
	break;

      default:
	putc (c, file);
      }
  va_end (argptr);
}

/* Split up a CONST_DOUBLE or integer constant rtx
   into two rtx's for single words,
   storing in *FIRST the word that comes first in memory in the target
   and in *SECOND the other.  */
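
/* For illustration only: with 32-bit words, a 64-bit HOST_WIDE_INT and
   little-endian word order, splitting the CONST_INT 0x100000002 yields
   *FIRST = GEN_INT (2) and *SECOND = GEN_INT (1); with WORDS_BIG_ENDIAN
   the two results are swapped.  */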

void
split_double (rtx value, rtx *first, rtx *second)
{
  if (GET_CODE (value) == CONST_INT)
    {
      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
	{
	  /* In this case the CONST_INT holds both target words.
	     Extract the bits from it into two word-sized pieces.
	     Sign extend each half to HOST_WIDE_INT.  */
	  unsigned HOST_WIDE_INT low, high;
	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;

	  /* Set sign_bit to the most significant bit of a word.  */
	  sign_bit = 1;
	  sign_bit <<= BITS_PER_WORD - 1;

	  /* Set mask so that all bits of the word are set.  We could
	     have used 1 << BITS_PER_WORD instead of basing the
	     calculation on sign_bit.  However, on machines where
	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
	     compiler warning, even though the code would never be
	     executed.  */
	  mask = sign_bit << 1;
	  mask--;

	  /* Set sign_extend as any remaining bits.  */
	  sign_extend = ~mask;

	  /* Pick the lower word and sign-extend it.  */
	  low = INTVAL (value);
	  low &= mask;
	  if (low & sign_bit)
	    low |= sign_extend;

	  /* Pick the higher word, shifted to the least significant
	     bits, and sign-extend it.  */
	  high = INTVAL (value);
	  high >>= BITS_PER_WORD - 1;
	  high >>= 1;
	  high &= mask;
	  if (high & sign_bit)
	    high |= sign_extend;

	  /* Store the words in the target machine order.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      *first = GEN_INT (high);
	      *second = GEN_INT (low);
	    }
	  else
	    {
	      *first = GEN_INT (low);
	      *second = GEN_INT (high);
	    }
	}
      else
	{
	  /* The rule for using CONST_INT for a wider mode
	     is that we regard the value as signed.
	     So sign-extend it.  */
	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
	  if (WORDS_BIG_ENDIAN)
	    {
	      *first = high;
	      *second = value;
	    }
	  else
	    {
	      *first = value;
	      *second = high;
	    }
	}
    }
  else if (GET_CODE (value) != CONST_DOUBLE)
    {
      if (WORDS_BIG_ENDIAN)
	{
	  *first = const0_rtx;
	  *second = value;
	}
      else
	{
	  *first = value;
	  *second = const0_rtx;
	}
    }
  else if (GET_MODE (value) == VOIDmode
	   /* This is the old way we did CONST_DOUBLE integers.  */
	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
    {
      /* In an integer, the words are defined as most and least significant.
	 So order them by the target's convention.  */
      if (WORDS_BIG_ENDIAN)
	{
	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
	  *second = GEN_INT (CONST_DOUBLE_LOW (value));
	}
      else
	{
	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));
	}
    }
  else
    {
      REAL_VALUE_TYPE r;
      long l[2];
      REAL_VALUE_FROM_CONST_DOUBLE (r, value);

      /* Note, this converts the REAL_VALUE_TYPE to the target's
	 format, splits up the floating point double and outputs
	 exactly 32 bits of it into each of l[0] and l[1] --
	 not necessarily BITS_PER_WORD bits.  */
      REAL_VALUE_TO_TARGET_DOUBLE (r, l);

      /* If 32 bits is an entire word for the target, but not for the host,
	 then sign-extend on the host so that the number will look the same
	 way on the host that it would on the target.  See for instance
	 simplify_unary_operation.  The #if is needed to avoid compiler
	 warnings.  */

#if HOST_BITS_PER_LONG > 32
      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
	{
	  if (l[0] & ((long) 1 << 31))
	    l[0] |= ((long) (-1) << 32);
	  if (l[1] & ((long) 1 << 31))
	    l[1] |= ((long) (-1) << 32);
	}
#endif

      *first = GEN_INT (l[0]);
      *second = GEN_INT (l[1]);
    }
}

/* Return nonzero if this function has no function calls.  */

int
leaf_function_p (void)
{
  rtx insn;
  rtx link;

  if (crtl->profile || profile_arc_flag)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (CALL_P (insn)
	  && ! SIBLING_CALL_P (insn))
	return 0;
      if (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
	return 0;
    }
  for (link = crtl->epilogue_delay_list;
       link;
       link = XEXP (link, 1))
    {
      insn = XEXP (link, 0);

      if (CALL_P (insn)
	  && ! SIBLING_CALL_P (insn))
	return 0;
      if (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
	return 0;
    }

  return 1;
}

/* Return 1 if the branch is a forward branch.
   Uses the insn_shuid array, so it works only in the final pass.  May be used
   by output templates to add customary branch prediction hints.  */
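
/* For instance (illustrative only), a back end's branch output template could
   select a "predict not taken" variant when this returns nonzero, following
   the usual heuristic that backward branches close loops and are likely
   taken.  */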
int
final_forward_branch_p (rtx insn)
{
  int insn_id, label_id;

  gcc_assert (uid_shuid);
  insn_id = INSN_SHUID (insn);
  label_id = INSN_SHUID (JUMP_LABEL (insn));
  /* We've hit some insns that do not have id information available.  */
  gcc_assert (insn_id && label_id);
  return insn_id < label_id;
}

/* On some machines, a function with no call insns
   can run faster if it doesn't create its own register window.
   When output, the leaf function should use only the "output"
   registers.  Ordinarily, the function would be compiled to use
   the "input" registers to find its arguments; it is a candidate
   for leaf treatment if it uses only the "input" registers.
   Leaf function treatment means renumbering so the function
   uses the "output" registers instead.  */
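
/* SPARC's register windows are the classic example; such targets define
   LEAF_REGISTERS and LEAF_REG_REMAP (noted here only as an illustration).  */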

#ifdef LEAF_REGISTERS

/* Return 1 if this function uses only the registers that can be
   safely renumbered.  */

int
only_leaf_regs_used (void)
{
  int i;
  const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((df_regs_ever_live_p (i) || global_regs[i])
	&& ! permitted_reg_in_leaf_functions[i])
      return 0;

  if (crtl->uses_pic_offset_table
      && pic_offset_table_rtx != 0
      && REG_P (pic_offset_table_rtx)
      && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
    return 0;

  return 1;
}

/* Scan all instructions and renumber all registers into those
   available in leaf functions.  */

static void
leaf_renumber_regs (rtx first)
{
  rtx insn;

  /* Renumber only the actual patterns.
     The reg-notes can contain frame pointer refs,
     and renumbering them could crash and is not needed anyway.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      leaf_renumber_regs_insn (PATTERN (insn));
  for (insn = crtl->epilogue_delay_list;
       insn;
       insn = XEXP (insn, 1))
    if (INSN_P (XEXP (insn, 0)))
      leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
}

/* Scan IN_RTX and its subexpressions, and renumber all regs into those
   available in leaf functions.  */

void
leaf_renumber_regs_insn (rtx in_rtx)
{
  int i, j;
  const char *format_ptr;

  if (in_rtx == 0)
    return;

  /* Renumber all input registers into output registers.  */

  if (REG_P (in_rtx))
    {
      int newreg;

      /* Don't renumber the same reg twice.  */
      if (in_rtx->used)
	return;

      newreg = REGNO (in_rtx);
      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
	 to reach here as part of a REG_NOTE.  */
      if (newreg >= FIRST_PSEUDO_REGISTER)
	{
	  in_rtx->used = 1;
	  return;
	}
      newreg = LEAF_REG_REMAP (newreg);
      gcc_assert (newreg >= 0);
      df_set_regs_ever_live (REGNO (in_rtx), false);
      df_set_regs_ever_live (newreg, true);
      SET_REGNO (in_rtx, newreg);
      in_rtx->used = 1;
    }

  if (INSN_P (in_rtx))
    {
      /* Inside a SEQUENCE, we find insns.
	 Renumber just the patterns of these insns,
	 just as we do for the top-level insns.  */
      leaf_renumber_regs_insn (PATTERN (in_rtx));
      return;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	leaf_renumber_regs_insn (XEXP (in_rtx, i));
	break;

      case 'E':
	if (NULL != XVEC (in_rtx, i))
	  {
	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
	  }
	break;

      case 'S':
      case 's':
      case '0':
      case 'i':
      case 'w':
      case 'n':
      case 'u':
	break;

      default:
	gcc_unreachable ();
      }
}
#endif


/* When -gused is used, emit debug info only for used symbols.  But in
   addition to the standard intercepted debug_hooks there are some direct
   calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
   Those routines may also be called from a higher-level intercepted routine.  So,
   to prevent recording data for an inner call to one of these during an intercept,
   we maintain an intercept nesting counter (debug_nesting).  We only save the
   intercepted arguments if the nesting is 1.  */
int debug_nesting = 0;

static tree *symbol_queue;
int symbol_queue_index = 0;
static int symbol_queue_size = 0;

/* Generate the symbols for any queued up type symbols we encountered
   while generating the type info for some originally used symbol.
   This might generate additional entries in the queue.  Only when
   the nesting depth goes to 0 is this routine called.  */

void
debug_flush_symbol_queue (void)
{
  int i;

  /* Make sure that additionally queued items are not flushed
     prematurely.  */

  ++debug_nesting;

  for (i = 0; i < symbol_queue_index; ++i)
    {
      /* If we pushed queued symbols then such symbols must be
         output no matter what anyone else says.  Specifically,
         we need to make sure dbxout_symbol() thinks the symbol was
         used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
         which may be set for outside reasons.  */
      int saved_tree_used = TREE_USED (symbol_queue[i]);
      int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
      TREE_USED (symbol_queue[i]) = 1;
      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;

#ifdef DBX_DEBUGGING_INFO
      dbxout_symbol (symbol_queue[i], 0);
#endif

      TREE_USED (symbol_queue[i]) = saved_tree_used;
      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
    }

  symbol_queue_index = 0;
  --debug_nesting;
}

/* Queue a type symbol needed as part of the definition of a decl
   symbol.  These symbols are generated when debug_flush_symbol_queue()
   is called.  */

void
debug_queue_symbol (tree decl)
{
  if (symbol_queue_index >= symbol_queue_size)
    {
      symbol_queue_size += 10;
      symbol_queue = xrealloc (symbol_queue,
			       symbol_queue_size * sizeof (tree));
    }

  symbol_queue[symbol_queue_index++] = decl;
}

/* Free symbol queue.  */
void
debug_free_queue (void)
{
  if (symbol_queue)
    {
      free (symbol_queue);
      symbol_queue = NULL;
      symbol_queue_size = 0;
    }
}

/* Turn the RTL into assembly.  */
static unsigned int
rest_of_handle_final (void)
{
  rtx x;
  const char *fnname;

  /* Get the function's name, as described by its RTL.  This may be
     different from the DECL_NAME name used in the source file.  */

  x = DECL_RTL (current_function_decl);
  gcc_assert (MEM_P (x));
  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == SYMBOL_REF);
  fnname = XSTR (x, 0);

  assemble_start_function (current_function_decl, fnname);
  final_start_function (get_insns (), asm_out_file, optimize);
  final (get_insns (), asm_out_file, optimize);
  final_end_function ();

#ifdef TARGET_UNWIND_INFO
  /* ??? The IA-64 ".handlerdata" directive must be issued before
     the ".endp" directive that closes the procedure descriptor.  */
  output_function_exception_table (fnname);
#endif

  assemble_end_function (current_function_decl, fnname);

#ifndef TARGET_UNWIND_INFO
  /* Otherwise, it feels unclean to switch sections in the middle.  */
  output_function_exception_table (fnname);
#endif

  user_defined_section_attribute = false;

  /* Free up reg info memory.  */
  free_reg_info ();

  if (! quiet_flag)
    fflush (asm_out_file);

  /* Write DBX symbols if requested.  */

  /* Note that for those inline functions where we don't initially
     know for certain that we will be generating an out-of-line copy,
     the first invocation of this routine (rest_of_compilation) will
     skip over this code by doing a `goto exit_rest_of_compilation;'.
     Later on, wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those inline functions that need
     to have out-of-line copies generated.  During that call, we
     *will* be routed past here.  */

  timevar_push (TV_SYMOUT);
  (*debug_hooks->function_decl) (current_function_decl);
  timevar_pop (TV_SYMOUT);
  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
				 decl_init_priority_lookup
				   (current_function_decl));
  if (DECL_STATIC_DESTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
				decl_fini_priority_lookup
				  (current_function_decl));
  return 0;
}

struct rtl_opt_pass pass_final =
{
 {
  RTL_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_final,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FINAL,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};


static unsigned int
rest_of_handle_shorten_branches (void)
{
  /* Shorten branches.  */
  shorten_branches (get_insns ());
  return 0;
}

struct rtl_opt_pass pass_shorten_branches =
{
 {
  RTL_PASS,
  "shorten",                            /* name */
  NULL,                                 /* gate */
  rest_of_handle_shorten_branches,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FINAL,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


static unsigned int
rest_of_clean_state (void)
{
  rtx insn, next;

  /* It is very important to decompose the RTL instruction chain here:
     debug information keeps pointing into CODE_LABEL insns inside the function
     body.  If these remain pointing to the other insns, we end up preserving
     the whole RTL chain and its attached detailed debug info in memory.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      NEXT_INSN (insn) = NULL;
      PREV_INSN (insn) = NULL;
    }

  /* In case the function was not output,
     don't leave any temporary anonymous types
     queued up for sdb output.  */
#ifdef SDB_DEBUGGING_INFO
  if (write_symbols == SDB_DEBUG)
    sdbout_types (NULL_TREE);
#endif

  reload_completed = 0;
  epilogue_completed = 0;
#ifdef STACK_REGS
  regstack_completed = 0;
#endif

  /* Clear out the insn_length contents now that they are no
     longer valid.  */
  init_insn_lengths ();

  /* Show no temporary slots allocated.  */
  init_temp_slots ();

  free_bb_for_insn ();

  if (targetm.binds_local_p (current_function_decl))
    {
      int pref = crtl->preferred_stack_boundary;
      if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
        pref = crtl->stack_alignment_needed;
      cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
        = pref;
    }

  /* Make sure volatile mem refs aren't considered valid operands for
     arithmetic insns.  We must call this here if this is a nested inline
     function, since the above code leaves us in the init_recog state,
     and the function context push/pop code does not save/restore volatile_ok.

     ??? Maybe it isn't necessary for expand_start_function to call this
     anymore if we do it here?  */

  init_recog_no_volatile ();

  /* We're done with this function.  Free up memory if we can.  */
  free_after_parsing (cfun);
  free_after_compilation (cfun);
  return 0;
}

struct rtl_opt_pass pass_clean_state =
{
 {
  RTL_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_clean_state,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FINAL,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_rtl,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};