/* Register Transfer Language (RTL) definitions for GCC
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_RTL_H
#define GCC_RTL_H

#include <utility>
#include "statistics.h"
#include "machmode.h"
#include "input.h"
#include "real.h"
#include "vec.h"
#include "fixed-value.h"
#include "alias.h"
#include "hashtab.h"
#include "wide-int.h"
#include "flags.h"
#include "is-a.h"

/* Value used by some passes to "recognize" noop moves as valid
   instructions.  */
#define NOOP_MOVE_INSN_CODE	INT_MAX

/* Register Transfer Language EXPRESSIONS CODES */

#define RTX_CODE	enum rtx_code
enum rtx_code  {

#define DEF_RTL_EXPR(ENUM, NAME, FORMAT, CLASS)   ENUM ,
#include "rtl.def"		/* rtl expressions are documented here */
#undef DEF_RTL_EXPR

  LAST_AND_UNUSED_RTX_CODE};	/* A convenient way to get a value for
				   NUM_RTX_CODE.
				   Assumes default enum value assignment.  */

/* The cast here saves many casts elsewhere.  */
#define NUM_RTX_CODE ((int) LAST_AND_UNUSED_RTX_CODE)

/* Similar, but since generator files get more entries... */
#ifdef GENERATOR_FILE
# define NON_GENERATOR_NUM_RTX_CODE ((int) MATCH_OPERAND)
#endif

/* Register Transfer Language EXPRESSIONS CODE CLASSES */

enum rtx_class  {
  /* We check bits 0-1 of some rtx class codes in the predicates below.  */

  /* Bit 0 = 0 if a comparison, 1 if arithmetic.
     Bit 1 = 1 if commutative.  */
  RTX_COMPARE,		/* 0 */
  RTX_COMM_COMPARE,
  RTX_BIN_ARITH,
  RTX_COMM_ARITH,

  /* Must follow the four preceding values.  */
  RTX_UNARY,		/* 4 */

  RTX_EXTRA,
  RTX_MATCH,
  RTX_INSN,

  /* Bit 0 = 1 if constant.  */
  RTX_OBJ,		/* 8 */
  RTX_CONST_OBJ,

  RTX_TERNARY,
  RTX_BITFIELD_OPS,
  RTX_AUTOINC
};

#define RTX_OBJ_MASK (~1)
#define RTX_OBJ_RESULT (RTX_OBJ & RTX_OBJ_MASK)
#define RTX_COMPARE_MASK (~1)
#define RTX_COMPARE_RESULT (RTX_COMPARE & RTX_COMPARE_MASK)
#define RTX_ARITHMETIC_MASK (~1)
#define RTX_ARITHMETIC_RESULT (RTX_COMM_ARITH & RTX_ARITHMETIC_MASK)
#define RTX_BINARY_MASK (~3)
#define RTX_BINARY_RESULT (RTX_COMPARE & RTX_BINARY_MASK)
#define RTX_COMMUTATIVE_MASK (~2)
#define RTX_COMMUTATIVE_RESULT (RTX_COMM_COMPARE & RTX_COMMUTATIVE_MASK)
#define RTX_NON_COMMUTATIVE_RESULT (RTX_COMPARE & RTX_COMMUTATIVE_MASK)
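
/* Editor's illustrative sketch, not part of the original header: the class
   predicates defined later in this file (BINARY_P, COMMUTATIVE_P, ...) rely
   on the enum layout above.  For example, the four binary classes
   RTX_COMPARE (0), RTX_COMM_COMPARE (1), RTX_BIN_ARITH (2) and
   RTX_COMM_ARITH (3) differ only in bits 0-1, so masking with
   RTX_BINARY_MASK (~3) maps all of them to RTX_BINARY_RESULT (0).  */
#if 0
static bool
rtx_class_is_binary_example (enum rtx_class rclass)
{
  /* This is the same test that BINARY_P performs on
     GET_RTX_CLASS (GET_CODE (X)).  */
  return ((int) rclass & RTX_BINARY_MASK) == RTX_BINARY_RESULT;
}
#endif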

extern const unsigned char rtx_length[NUM_RTX_CODE];
#define GET_RTX_LENGTH(CODE)		(rtx_length[(int) (CODE)])

extern const char * const rtx_name[NUM_RTX_CODE];
#define GET_RTX_NAME(CODE)		(rtx_name[(int) (CODE)])

extern const char * const rtx_format[NUM_RTX_CODE];
#define GET_RTX_FORMAT(CODE)		(rtx_format[(int) (CODE)])

extern const enum rtx_class rtx_class[NUM_RTX_CODE];
#define GET_RTX_CLASS(CODE)		(rtx_class[(int) (CODE)])

/* True if CODE is part of the insn chain (i.e. has INSN_UID, PREV_INSN
   and NEXT_INSN fields).  */
#define INSN_CHAIN_CODE_P(CODE) IN_RANGE (CODE, DEBUG_INSN, NOTE)

extern const unsigned char rtx_code_size[NUM_RTX_CODE];
extern const unsigned char rtx_next[NUM_RTX_CODE];

/* The flags and bitfields of an ADDR_DIFF_VEC.  BASE is the base label
   relative to which the offsets are calculated, as explained in rtl.def.  */
struct addr_diff_vec_flags
{
  /* Set at the start of shorten_branches - ONLY WHEN OPTIMIZING - : */
  unsigned min_align: 8;
  /* Flags: */
  unsigned base_after_vec: 1; /* BASE is after the ADDR_DIFF_VEC.  */
  unsigned min_after_vec: 1;  /* minimum address target label is
				 after the ADDR_DIFF_VEC.  */
  unsigned max_after_vec: 1;  /* maximum address target label is
				 after the ADDR_DIFF_VEC.  */
  unsigned min_after_base: 1; /* minimum address target label is
				 after BASE.  */
  unsigned max_after_base: 1; /* maximum address target label is
				 after BASE.  */
  /* Set by the actual branch shortening process - ONLY WHEN OPTIMIZING - : */
  unsigned offset_unsigned: 1; /* offsets have to be treated as unsigned.  */
  unsigned : 2;
  unsigned scale : 8;
};

/* Structure used to describe the attributes of a MEM.  These are hashed
   so MEMs that have the same attributes share a data structure.  This means
   they cannot be modified in place.  */
struct GTY(()) mem_attrs
{
  /* The expression that the MEM accesses, or null if not known.
     This expression might be larger than the memory reference itself.
     (In other words, the MEM might access only part of the object.)  */
  tree expr;

  /* The offset of the memory reference from the start of EXPR.
     Only valid if OFFSET_KNOWN_P.  */
  HOST_WIDE_INT offset;

  /* The size of the memory reference in bytes.  Only valid if
     SIZE_KNOWN_P.  */
  HOST_WIDE_INT size;

  /* The alias set of the memory reference.  */
  alias_set_type alias;

  /* The alignment of the reference in bits.  Always a multiple of
     BITS_PER_UNIT.  Note that EXPR may have a stricter alignment
     than the memory reference itself.  */
  unsigned int align;

  /* The address space that the memory reference uses.  */
  unsigned char addrspace;

  /* True if OFFSET is known.  */
  bool offset_known_p;

  /* True if SIZE is known.  */
  bool size_known_p;
};

/* Structure used to describe the attributes of a REG in a similar way as
   mem_attrs does for MEM above.  Note that the OFFSET field is calculated
   in the same way as for mem_attrs, rather than in the same way as a
   SUBREG_BYTE.  For example, if a big-endian target stores a byte
   object in the low part of a 4-byte register, the OFFSET field
   will be -3 rather than 0.  */

struct GTY(()) reg_attrs {
  tree decl;			/* decl corresponding to REG.  */
  HOST_WIDE_INT offset;		/* Offset from start of DECL.  */
};

/* Common union for an element of an rtx.  */

union rtunion
{
  int rt_int;
  unsigned int rt_uint;
  const char *rt_str;
  rtx rt_rtx;
  rtvec rt_rtvec;
  enum machine_mode rt_type;
  addr_diff_vec_flags rt_addr_diff_vec_flags;
  struct cselib_val *rt_cselib;
  tree rt_tree;
  basic_block rt_bb;
  mem_attrs *rt_mem;
  reg_attrs *rt_reg;
  struct constant_descriptor_rtx *rt_constant;
  struct dw_cfi_node *rt_cfi;
};

/* This structure remembers the position of a SYMBOL_REF within an
   object_block structure.  A SYMBOL_REF only provides this information
   if SYMBOL_REF_HAS_BLOCK_INFO_P is true.  */
struct GTY(()) block_symbol {
  /* The usual SYMBOL_REF fields.  */
  rtunion GTY ((skip)) fld[2];

  /* The block that contains this object.  */
  struct object_block *block;

  /* The offset of this object from the start of its block.  It is negative
     if the symbol has not yet been assigned an offset.  */
  HOST_WIDE_INT offset;
};

/* Describes a group of objects that are to be placed together in such
   a way that their relative positions are known.  */
struct GTY(()) object_block {
  /* The section in which these objects should be placed.  */
  section *sect;

  /* The alignment of the first object, measured in bits.  */
  unsigned int alignment;

  /* The total size of the objects, measured in bytes.  */
  HOST_WIDE_INT size;

  /* The SYMBOL_REFs for each object.  The vector is sorted in
     order of increasing offset and the following conditions will
     hold for each element X:

	 SYMBOL_REF_HAS_BLOCK_INFO_P (X)
	 !SYMBOL_REF_ANCHOR_P (X)
	 SYMBOL_REF_BLOCK (X) == [address of this structure]
	 SYMBOL_REF_BLOCK_OFFSET (X) >= 0.  */
  vec<rtx, va_gc> *objects;

  /* All the anchor SYMBOL_REFs used to address these objects, sorted
     in order of increasing offset, and then increasing TLS model.
     The following conditions will hold for each element X in this vector:

	 SYMBOL_REF_HAS_BLOCK_INFO_P (X)
	 SYMBOL_REF_ANCHOR_P (X)
	 SYMBOL_REF_BLOCK (X) == [address of this structure]
	 SYMBOL_REF_BLOCK_OFFSET (X) >= 0.  */
  vec<rtx, va_gc> *anchors;
};

struct GTY((variable_size)) hwivec_def {
  HOST_WIDE_INT elem[1];
};

/* Number of elements of the HWIVEC if RTX is a CONST_WIDE_INT.  */
#define CWI_GET_NUM_ELEM(RTX)					\
  ((int)RTL_FLAG_CHECK1("CWI_GET_NUM_ELEM", (RTX), CONST_WIDE_INT)->u2.num_elem)
#define CWI_PUT_NUM_ELEM(RTX, NUM)					\
  (RTL_FLAG_CHECK1("CWI_PUT_NUM_ELEM", (RTX), CONST_WIDE_INT)->u2.num_elem = (NUM))
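
/* Editor's illustrative sketch, not part of the original header: the
   individual HOST_WIDE_INT elements of a CONST_WIDE_INT can be visited with
   CWI_GET_NUM_ELEM above and the CWI_ELT accessor defined further down in
   this file.  */
#if 0
static void
walk_const_wide_int_example (const_rtx x)
{
  gcc_assert (CONST_WIDE_INT_P (x));
  for (int i = 0; i < CWI_GET_NUM_ELEM (x); i++)
    {
      HOST_WIDE_INT elt = CWI_ELT (x, i);
      (void) elt;	/* Inspect one element of the constant here.  */
    }
}
#endif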

/* RTL expression ("rtx").  */

/* The GTY "desc" and "tag" options below are a kludge: we need a desc
   field for gengtype to recognize that inheritance is occurring,
   so that all subclasses are redirected to the traversal hook for the
   base class.
   However, all of the fields are in the base class, and special-casing
   is at work.  Hence we use desc and tag of 0, generating a switch
   statement of the form:
     switch (0)
       {
       case 0: // all the work happens here
      }
   in order to work with the existing special-casing in gengtype.  */

struct GTY((desc("0"), tag("0"),
	    chain_next ("RTX_NEXT (&%h)"),
	    chain_prev ("RTX_PREV (&%h)"))) rtx_def {
  /* The kind of expression this is.  */
  ENUM_BITFIELD(rtx_code) code: 16;

  /* The kind of value the expression has.  */
  ENUM_BITFIELD(machine_mode) mode : 8;

  /* 1 in a MEM if we should keep the alias set for this mem unchanged
     when we access a component.
     1 in a JUMP_INSN if it is a crossing jump.
     1 in a CALL_INSN if it is a sibling call.
     1 in a SET that is for a return.
     In a CODE_LABEL, part of the two-bit alternate entry field.
     1 in a CONCAT is VAL_EXPR_IS_COPIED in var-tracking.c.
     1 in a VALUE is SP_BASED_VALUE_P in cselib.c.
     1 in a SUBREG generated by LRA for reload insns.  */
  unsigned int jump : 1;
  /* In a CODE_LABEL, part of the two-bit alternate entry field.
     1 in a MEM if it cannot trap.
     1 in a CALL_INSN logically equivalent to
       ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P. */
  unsigned int call : 1;
  /* 1 in a REG, MEM, or CONCAT if the value is set at most once, anywhere.
     1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P.
     1 in a SYMBOL_REF if it addresses something in the per-function
     constants pool.
     1 in a CALL_INSN logically equivalent to ECF_CONST and TREE_READONLY.
     1 in a NOTE, or EXPR_LIST for a const call.
     1 in a JUMP_INSN of an annulling branch.
     1 in a CONCAT is VAL_EXPR_IS_CLOBBERED in var-tracking.c.
     1 in a preserved VALUE is PRESERVED_VALUE_P in cselib.c.
     1 in a clobber temporarily created for LRA.  */
  unsigned int unchanging : 1;
  /* 1 in a MEM or ASM_OPERANDS expression if the memory reference is volatile.
     1 in an INSN, CALL_INSN, JUMP_INSN, CODE_LABEL, BARRIER, or NOTE
     if it has been deleted.
     1 in a REG expression if it corresponds to a variable declared by the user,
     0 for an internally generated temporary.
     1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P.
     1 in a LABEL_REF, REG_LABEL_TARGET or REG_LABEL_OPERAND note for a
     non-local label.
     In a SYMBOL_REF, this flag is used for machine-specific purposes.
     In a PREFETCH, this flag indicates that it should be considered a scheduling
     barrier.
     1 in a CONCAT is VAL_NEEDS_RESOLUTION in var-tracking.c.  */
  unsigned int volatil : 1;
  /* 1 in a REG if the register is used only in the exit code of a loop.
     1 in a SUBREG expression if it was generated from a variable with a
     promoted mode.
     1 in a CODE_LABEL if the label is used for nonlocal gotos
     and must not be deleted even if its count is zero.
     1 in an INSN, JUMP_INSN or CALL_INSN if this insn must be scheduled
     together with the preceding insn.  Valid only within sched.
     1 in an INSN, JUMP_INSN, or CALL_INSN if insn is in a delay slot and
     from the target of a branch.  Valid from reorg until end of compilation;
     cleared before used.

     The name of the field is historical.  It used to be used in MEMs
     to record whether the MEM accessed part of a structure.  */
  unsigned int in_struct : 1;
  /* At the end of RTL generation, 1 if this rtx is used.  This is used for
     copying shared structure.  See `unshare_all_rtl'.
     In a REG, this is not needed for that purpose, and used instead
     in `leaf_renumber_regs_insn'.
     1 in a SYMBOL_REF, means that emit_library_call
     has used it as the function.
     1 in a CONCAT is VAL_HOLDS_TRACK_EXPR in var-tracking.c.
     1 in a VALUE or DEBUG_EXPR is VALUE_RECURSED_INTO in var-tracking.c.  */
  unsigned int used : 1;
  /* 1 in an INSN or a SET if this rtx is related to the call frame,
     either changing how we compute the frame address or saving and
     restoring registers in the prologue and epilogue.
     1 in a REG or MEM if it is a pointer.
     1 in a SYMBOL_REF if it addresses something in the per-function
     constant string pool.
     1 in a VALUE is VALUE_CHANGED in var-tracking.c.  */
  unsigned frame_related : 1;
  /* 1 in a REG or PARALLEL that is the current function's return value.
     1 in a SYMBOL_REF for a weak symbol.
     1 in a CALL_INSN logically equivalent to ECF_PURE and DECL_PURE_P.
     1 in a CONCAT is VAL_EXPR_HAS_REVERSE in var-tracking.c.
     1 in a VALUE or DEBUG_EXPR is NO_LOC_P in var-tracking.c.  */
  unsigned return_val : 1;

  union {
    /* The final union field is aligned to 64 bits on LP64 hosts,
       giving a 32-bit gap after the fields above.  We optimize the
       layout for that case and use the gap for extra code-specific
       information.  */

    /* The ORIGINAL_REGNO of a REG.  */
    unsigned int original_regno;

    /* The INSN_UID of an RTX_INSN-class code.  */
    int insn_uid;

    /* The SYMBOL_REF_FLAGS of a SYMBOL_REF.  */
    unsigned int symbol_ref_flags;

    /* The PAT_VAR_LOCATION_STATUS of a VAR_LOCATION.  */
    enum var_init_status var_location_status;

    /* In a CONST_WIDE_INT (aka hwivec_def), this is the number of
       HOST_WIDE_INTs in the hwivec_def.  */
    unsigned int num_elem;
  } GTY ((skip)) u2;

  /* The first element of the operands of this rtx.
     The number of operands and their types are controlled
     by the `code' field, according to rtl.def.  */
  union u {
    rtunion fld[1];
    HOST_WIDE_INT hwint[1];
    struct block_symbol block_sym;
    struct real_value rv;
    struct fixed_value fv;
    struct hwivec_def hwiv;
  } GTY ((special ("rtx_def"), desc ("GET_CODE (&%0)"))) u;
};

/* A node for constructing singly-linked lists of rtx.  */

class GTY(()) rtx_expr_list : public rtx_def
{
  /* No extra fields, but adds invariant: (GET_CODE (X) == EXPR_LIST).  */

public:
  /* Get next in list.  */
  rtx_expr_list *next () const;

  /* Get at the underlying rtx.  */
  rtx element () const;
};

template <>
template <>
inline bool
is_a_helper <rtx_expr_list *>::test (rtx rt)
{
  return rt->code == EXPR_LIST;
}

class GTY(()) rtx_insn_list : public rtx_def
{
  /* No extra fields, but adds invariant: (GET_CODE (X) == INSN_LIST).

     This is an instance of:

       DEF_RTL_EXPR(INSN_LIST, "insn_list", "ue", RTX_EXTRA)

     i.e. a node for constructing singly-linked lists of rtx_insn *, where
     the list is "external" to the insn (as opposed to the doubly-linked
     list embedded within rtx_insn itself).  */

public:
  /* Get next in list.  */
  rtx_insn_list *next () const;

  /* Get at the underlying instruction.  */
  rtx_insn *insn () const;

};

template <>
template <>
inline bool
is_a_helper <rtx_insn_list *>::test (rtx rt)
{
  return rt->code == INSN_LIST;
}

/* A node with invariant GET_CODE (X) == SEQUENCE i.e. a vector of rtx,
   typically (but not always) of rtx_insn *, used in the late passes.  */

class GTY(()) rtx_sequence : public rtx_def
{
  /* No extra fields, but adds invariant: (GET_CODE (X) == SEQUENCE).  */

public:
  /* Get number of elements in sequence.  */
  int len () const;

  /* Get i-th element of the sequence.  */
  rtx element (int index) const;

  /* Get i-th element of the sequence, with a checked cast to
     rtx_insn *.  */
  rtx_insn *insn (int index) const;
};

template <>
template <>
inline bool
is_a_helper <rtx_sequence *>::test (rtx rt)
{
  return rt->code == SEQUENCE;
}

template <>
template <>
inline bool
is_a_helper <const rtx_sequence *>::test (const_rtx rt)
{
  return rt->code == SEQUENCE;
}

class GTY(()) rtx_insn : public rtx_def
{
public:
  /* No extra fields, but adds the invariant:

     (INSN_P (X)
      || NOTE_P (X)
      || JUMP_TABLE_DATA_P (X)
      || BARRIER_P (X)
      || LABEL_P (X))

     i.e. that we must be able to use the following:
      INSN_UID ()
      NEXT_INSN ()
      PREV_INSN ()
    i.e. we have an rtx that has an INSN_UID field and can be part of
    a linked list of insns.
  */

  /* Returns true if this insn has been deleted.  */

  bool deleted () const { return volatil; }

  /* Mark this insn as deleted.  */

  void set_deleted () { volatil = true; }

  /* Mark this insn as not deleted.  */

  void set_undeleted () { volatil = false; }
521 522
};

523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580
/* Subclasses of rtx_insn.  */

class GTY(()) rtx_debug_insn : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       DEBUG_INSN_P (X) aka (GET_CODE (X) == DEBUG_INSN)
     i.e. an annotation for tracking variable assignments.

     This is an instance of:
       DEF_RTL_EXPR(DEBUG_INSN, "debug_insn", "uuBeiie", RTX_INSN)
     from rtl.def.  */
};

class GTY(()) rtx_nonjump_insn : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       NONJUMP_INSN_P (X) aka (GET_CODE (X) == INSN)
     i.e an instruction that cannot jump.

     This is an instance of:
       DEF_RTL_EXPR(INSN, "insn", "uuBeiie", RTX_INSN)
     from rtl.def.  */
};

class GTY(()) rtx_jump_insn : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       JUMP_P (X) aka (GET_CODE (X) == JUMP_INSN)
     i.e. an instruction that can possibly jump.

     This is an instance of:
       DEF_RTL_EXPR(JUMP_INSN, "jump_insn", "uuBeiie0", RTX_INSN)
     from rtl.def.  */
};

class GTY(()) rtx_call_insn : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       CALL_P (X) aka (GET_CODE (X) == CALL_INSN)
     i.e. an instruction that can possibly call a subroutine
     but which will not change which instruction comes next
     in the current function.

     This is an instance of:
       DEF_RTL_EXPR(CALL_INSN, "call_insn", "uuBeiiee", RTX_INSN)
     from rtl.def.  */
};

class GTY(()) rtx_jump_table_data : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       JUMP_TABLE_DATA_P (X) aka (GET_CODE (INSN) == JUMP_TABLE_DATA)
     i.e. a data for a jump table, considered an instruction for
     historical reasons.

     This is an instance of:
       DEF_RTL_EXPR(JUMP_TABLE_DATA, "jump_table_data", "uuBe0000", RTX_INSN)
     from rtl.def.  */

public:

  /* This can be either:

       (a) a table of absolute jumps, in which case PATTERN (this) is an
           ADDR_VEC with arg 0 a vector of labels, or

       (b) a table of relative jumps (e.g. for -fPIC), in which case
           PATTERN (this) is an ADDR_DIFF_VEC, with arg 0 a LABEL_REF and
	   arg 1 the vector of labels.

     This method gets the underlying vec.  */

  inline rtvec get_labels () const;
};

class GTY(()) rtx_barrier : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       BARRIER_P (X) aka (GET_CODE (X) == BARRIER)
     i.e. a marker that indicates that control will not flow through.

     This is an instance of:
       DEF_RTL_EXPR(BARRIER, "barrier", "uu00000", RTX_EXTRA)
     from rtl.def.  */
};

class GTY(()) rtx_code_label : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       LABEL_P (X) aka (GET_CODE (X) == CODE_LABEL)
     i.e. a label in the assembler.

     This is an instance of:
       DEF_RTL_EXPR(CODE_LABEL, "code_label", "uuB00is", RTX_EXTRA)
     from rtl.def.  */
};

class GTY(()) rtx_note : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       NOTE_P(X) aka (GET_CODE (X) == NOTE)
     i.e. a note about the corresponding source code.

     This is an instance of:
       DEF_RTL_EXPR(NOTE, "note", "uuB0ni", RTX_EXTRA)
     from rtl.def.  */
};

/* The size in bytes of an rtx header (code, mode and flags).  */
#define RTX_HDR_SIZE offsetof (struct rtx_def, u)

/* The size in bytes of an rtx with code CODE.  */
#define RTX_CODE_SIZE(CODE) rtx_code_size[CODE]

#define NULL_RTX (rtx) 0

/* The "next" and "previous" RTX, relative to this one.  */

#define RTX_NEXT(X) (rtx_next[GET_CODE (X)] == 0 ? NULL			\
		     : *(rtx *)(((char *)X) + rtx_next[GET_CODE (X)]))

/* FIXME: the "NEXT_INSN (PREV_INSN (X)) == X" condition shouldn't be needed.
 */
#define RTX_PREV(X) ((INSN_P (X)       			\
                      || NOTE_P (X)       		\
                      || JUMP_TABLE_DATA_P (X)		\
                      || BARRIER_P (X)        		\
                      || LABEL_P (X))    		\
		     && PREV_INSN (as_a <rtx_insn *> (X)) != NULL	\
                     && NEXT_INSN (PREV_INSN (as_a <rtx_insn *> (X))) == X \
                     ? PREV_INSN (as_a <rtx_insn *> (X)) : NULL)

/* Define macros to access the `code' field of the rtx.  */

#define GET_CODE(RTX)	    ((enum rtx_code) (RTX)->code)
#define PUT_CODE(RTX, CODE) ((RTX)->code = (CODE))

#define GET_MODE(RTX)	    ((enum machine_mode) (RTX)->mode)
#define PUT_MODE(RTX, MODE) ((RTX)->mode = (MODE))

/* RTL vector.  These appear inside RTX's when there is a need
   for a variable number of things.  The principal use is inside
   PARALLEL expressions.  */

struct GTY(()) rtvec_def {
  int num_elem;		/* number of elements */
  rtx GTY ((length ("%h.num_elem"))) elem[1];
};

#define NULL_RTVEC (rtvec) 0

#define GET_NUM_ELEM(RTVEC)		((RTVEC)->num_elem)
675
#define PUT_NUM_ELEM(RTVEC, NUM)	((RTVEC)->num_elem = (NUM))
Jim Wilson committed
676

677
/* Predicate yielding nonzero iff X is an rtx for a register.  */
Jim Wilson committed
678 679
#define REG_P(X) (GET_CODE (X) == REG)

680 681 682
/* Predicate yielding nonzero iff X is an rtx for a memory location.  */
#define MEM_P(X) (GET_CODE (X) == MEM)

Kenneth Zadeck committed
683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707
#if TARGET_SUPPORTS_WIDE_INT

/* Match CONST_*s that can represent compile-time constant integers.  */
#define CASE_CONST_SCALAR_INT \
   case CONST_INT: \
   case CONST_WIDE_INT

/* Match CONST_*s for which pointer equality corresponds to value
   equality.  */
#define CASE_CONST_UNIQUE \
   case CONST_INT: \
   case CONST_WIDE_INT: \
   case CONST_DOUBLE: \
   case CONST_FIXED

/* Match all CONST_* rtxes.  */
#define CASE_CONST_ANY \
   case CONST_INT: \
   case CONST_WIDE_INT: \
   case CONST_DOUBLE: \
   case CONST_FIXED: \
   case CONST_VECTOR

#else

/* Match CONST_*s that can represent compile-time constant integers.  */
#define CASE_CONST_SCALAR_INT \
   case CONST_INT: \
   case CONST_DOUBLE

Kenneth Zadeck committed
713 714
/* Match CONST_*s for which pointer equality corresponds to value
   equality.  */
715 716 717 718 719 720 721 722 723 724 725
#define CASE_CONST_UNIQUE \
   case CONST_INT: \
   case CONST_DOUBLE: \
   case CONST_FIXED

/* Match all CONST_* rtxes.  */
#define CASE_CONST_ANY \
   case CONST_INT: \
   case CONST_DOUBLE: \
   case CONST_FIXED: \
   case CONST_VECTOR
Kenneth Zadeck committed
726
#endif
727

728
/* Predicate yielding nonzero iff X is an rtx for a constant integer.  */
729 730
#define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)

Kenneth Zadeck committed
731 732 733
/* Predicate yielding nonzero iff X is an rtx for a constant integer.  */
#define CONST_WIDE_INT_P(X) (GET_CODE (X) == CONST_WIDE_INT)

734 735 736
/* Predicate yielding nonzero iff X is an rtx for a constant fixed-point.  */
#define CONST_FIXED_P(X) (GET_CODE (X) == CONST_FIXED)

737 738 739 740
/* Predicate yielding true iff X is an rtx for a double-int
   or floating point constant.  */
#define CONST_DOUBLE_P(X) (GET_CODE (X) == CONST_DOUBLE)

741 742 743 744
/* Predicate yielding true iff X is an rtx for a double-int.  */
#define CONST_DOUBLE_AS_INT_P(X) \
  (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) == VOIDmode)

/* Predicate yielding true iff X is an rtx for an integer constant.  */
#if TARGET_SUPPORTS_WIDE_INT
#define CONST_SCALAR_INT_P(X) \
  (CONST_INT_P (X) || CONST_WIDE_INT_P (X))
#else
#define CONST_SCALAR_INT_P(X) \
  (CONST_INT_P (X) || CONST_DOUBLE_AS_INT_P (X))
#endif

/* Predicate yielding true iff X is an rtx for a floating point constant.  */
#define CONST_DOUBLE_AS_FLOAT_P(X) \
  (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) != VOIDmode)

/* Predicate yielding nonzero iff X is a label insn.  */
#define LABEL_P(X) (GET_CODE (X) == CODE_LABEL)

/* Predicate yielding nonzero iff X is a jump insn.  */
#define JUMP_P(X) (GET_CODE (X) == JUMP_INSN)

/* Predicate yielding nonzero iff X is a call insn.  */
#define CALL_P(X) (GET_CODE (X) == CALL_INSN)

/* Predicate yielding nonzero iff X is an insn that cannot jump.  */
#define NONJUMP_INSN_P(X) (GET_CODE (X) == INSN)

/* Predicate yielding nonzero iff X is a debug note/insn.  */
#define DEBUG_INSN_P(X) (GET_CODE (X) == DEBUG_INSN)

/* Predicate yielding nonzero iff X is an insn that is not a debug insn.  */
#define NONDEBUG_INSN_P(X) (INSN_P (X) && !DEBUG_INSN_P (X))

/* Nonzero if DEBUG_INSN_P may possibly hold.  */
#define MAY_HAVE_DEBUG_INSNS (flag_var_tracking_assignments)

/* Predicate yielding nonzero iff X is a real insn.  */
#define INSN_P(X) \
  (NONJUMP_INSN_P (X) || DEBUG_INSN_P (X) || JUMP_P (X) || CALL_P (X))

/* Predicate yielding nonzero iff X is a note insn.  */
#define NOTE_P(X) (GET_CODE (X) == NOTE)

/* Predicate yielding nonzero iff X is a barrier insn.  */
#define BARRIER_P(X) (GET_CODE (X) == BARRIER)

/* Predicate yielding nonzero iff X is a data for a jump table.  */
#define JUMP_TABLE_DATA_P(INSN) (GET_CODE (INSN) == JUMP_TABLE_DATA)

template <>
template <>
inline bool
is_a_helper <rtx_insn *>::test (rtx rt)
{
  return (INSN_P (rt)
	  || NOTE_P (rt)
	  || JUMP_TABLE_DATA_P (rt)
	  || BARRIER_P (rt)
	  || LABEL_P (rt));
}

template <>
template <>
inline bool
is_a_helper <const rtx_insn *>::test (const_rtx rt)
{
  return (INSN_P (rt)
	  || NOTE_P (rt)
	  || JUMP_TABLE_DATA_P (rt)
	  || BARRIER_P (rt)
	  || LABEL_P (rt));
}
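
/* Editor's illustrative sketch, not part of the original header: with the
   specializations above, the is-a.h helpers can be used to test whether a
   plain rtx is part of the insn chain and to downcast it to rtx_insn *.  */
#if 0
static rtx_insn *
rtx_to_insn_example (rtx x)
{
  if (is_a <rtx_insn *> (x))
    return as_a <rtx_insn *> (x);	/* Checked cast.  */
  return NULL;
}
#endif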

template <>
template <>
inline bool
is_a_helper <rtx_debug_insn *>::test (rtx rt)
{
  return DEBUG_INSN_P (rt);
}

template <>
template <>
inline bool
is_a_helper <rtx_nonjump_insn *>::test (rtx rt)
{
  return NONJUMP_INSN_P (rt);
}

template <>
template <>
inline bool
is_a_helper <rtx_jump_insn *>::test (rtx rt)
{
  return JUMP_P (rt);
}

template <>
template <>
inline bool
is_a_helper <rtx_call_insn *>::test (rtx rt)
{
  return CALL_P (rt);
}

template <>
template <>
inline bool
is_a_helper <rtx_call_insn *>::test (rtx_insn *insn)
{
  return CALL_P (insn);
}

template <>
template <>
inline bool
is_a_helper <rtx_jump_table_data *>::test (rtx rt)
{
  return JUMP_TABLE_DATA_P (rt);
}

template <>
template <>
inline bool
is_a_helper <rtx_jump_table_data *>::test (rtx_insn *insn)
{
  return JUMP_TABLE_DATA_P (insn);
}

template <>
template <>
inline bool
is_a_helper <rtx_barrier *>::test (rtx rt)
{
  return BARRIER_P (rt);
}

template <>
template <>
inline bool
is_a_helper <rtx_code_label *>::test (rtx rt)
{
  return LABEL_P (rt);
}

template <>
template <>
inline bool
is_a_helper <rtx_code_label *>::test (rtx_insn *insn)
{
  return LABEL_P (insn);
}

template <>
template <>
inline bool
is_a_helper <rtx_note *>::test (rtx rt)
{
  return NOTE_P (rt);
}

template <>
template <>
inline bool
is_a_helper <rtx_note *>::test (rtx_insn *insn)
{
  return NOTE_P (insn);
}

/* Predicate yielding nonzero iff X is a return or simple_return.  */
#define ANY_RETURN_P(X) \
  (GET_CODE (X) == RETURN || GET_CODE (X) == SIMPLE_RETURN)

/* 1 if X is a unary operator.  */

#define UNARY_P(X)   \
  (GET_RTX_CLASS (GET_CODE (X)) == RTX_UNARY)

/* 1 if X is a binary operator.  */

#define BINARY_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_BINARY_MASK) == RTX_BINARY_RESULT)

/* 1 if X is an arithmetic operator.  */

#define ARITHMETIC_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_ARITHMETIC_MASK)			\
    == RTX_ARITHMETIC_RESULT)

/* 1 if X is a commutative arithmetic operator.  */

#define COMMUTATIVE_ARITH_P(X)   \
  (GET_RTX_CLASS (GET_CODE (X)) == RTX_COMM_ARITH)

/* 1 if X is a commutative arithmetic operator or a comparison operator.
   These two are sometimes selected together because it is possible to
   swap the two operands.  */

#define SWAPPABLE_OPERANDS_P(X)   \
  ((1 << GET_RTX_CLASS (GET_CODE (X)))					\
    & ((1 << RTX_COMM_ARITH) | (1 << RTX_COMM_COMPARE)			\
       | (1 << RTX_COMPARE)))

/* 1 if X is a non-commutative operator.  */

#define NON_COMMUTATIVE_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMMUTATIVE_MASK)		\
    == RTX_NON_COMMUTATIVE_RESULT)

/* 1 if X is a commutative operator on integers.  */

#define COMMUTATIVE_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMMUTATIVE_MASK)		\
    == RTX_COMMUTATIVE_RESULT)

/* 1 if X is a relational operator.  */

#define COMPARISON_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMPARE_MASK) == RTX_COMPARE_RESULT)

/* 1 if X is a constant value that is an integer.  */

#define CONSTANT_P(X)   \
  (GET_RTX_CLASS (GET_CODE (X)) == RTX_CONST_OBJ)

/* 1 if X can be used to represent an object.  */
#define OBJECT_P(X)							\
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_OBJ_MASK) == RTX_OBJ_RESULT)

/* General accessor macros for accessing the fields of an rtx.  */

#if defined ENABLE_RTL_CHECKING && (GCC_VERSION >= 2007)
/* The bit with a star outside the statement expr and an & inside is
   so that N can be evaluated only once.  */
#define RTL_CHECK1(RTX, N, C1) __extension__				\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     const enum rtx_code _code = GET_CODE (_rtx);			\
     if (_n < 0 || _n >= GET_RTX_LENGTH (_code))			\
       rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__,		\
				__FUNCTION__);				\
     if (GET_RTX_FORMAT (_code)[_n] != C1)				\
       rtl_check_failed_type1 (_rtx, _n, C1, __FILE__, __LINE__,	\
			       __FUNCTION__);				\
     &_rtx->u.fld[_n]; }))

#define RTL_CHECK2(RTX, N, C1, C2) __extension__			\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     const enum rtx_code _code = GET_CODE (_rtx);			\
     if (_n < 0 || _n >= GET_RTX_LENGTH (_code))			\
       rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__,		\
				__FUNCTION__);				\
     if (GET_RTX_FORMAT (_code)[_n] != C1				\
	 && GET_RTX_FORMAT (_code)[_n] != C2)				\
       rtl_check_failed_type2 (_rtx, _n, C1, C2, __FILE__, __LINE__,	\
			       __FUNCTION__);				\
     &_rtx->u.fld[_n]; }))

#define RTL_CHECKC1(RTX, N, C) __extension__				\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     if (GET_CODE (_rtx) != (C))					\
       rtl_check_failed_code1 (_rtx, (C), __FILE__, __LINE__,		\
			       __FUNCTION__);				\
     &_rtx->u.fld[_n]; }))

#define RTL_CHECKC2(RTX, N, C1, C2) __extension__			\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     const enum rtx_code _code = GET_CODE (_rtx);			\
     if (_code != (C1) && _code != (C2))				\
       rtl_check_failed_code2 (_rtx, (C1), (C2), __FILE__, __LINE__,	\
			       __FUNCTION__); \
     &_rtx->u.fld[_n]; }))

#define RTVEC_ELT(RTVEC, I) __extension__				\
(*({ __typeof (RTVEC) const _rtvec = (RTVEC); const int _i = (I);	\
     if (_i < 0 || _i >= GET_NUM_ELEM (_rtvec))				\
       rtvec_check_failed_bounds (_rtvec, _i, __FILE__, __LINE__,	\
				  __FUNCTION__);			\
     &_rtvec->elem[_i]; }))

#define XWINT(RTX, N) __extension__					\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     const enum rtx_code _code = GET_CODE (_rtx);			\
     if (_n < 0 || _n >= GET_RTX_LENGTH (_code))			\
       rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__,		\
				__FUNCTION__);				\
     if (GET_RTX_FORMAT (_code)[_n] != 'w')				\
       rtl_check_failed_type1 (_rtx, _n, 'w', __FILE__, __LINE__,	\
			       __FUNCTION__);				\
     &_rtx->u.hwint[_n]; }))

#define CWI_ELT(RTX, I) __extension__					\
(*({ __typeof (RTX) const _cwi = (RTX);					\
     int _max = CWI_GET_NUM_ELEM (_cwi);				\
     const int _i = (I);						\
     if (_i < 0 || _i >= _max)						\
       cwi_check_failed_bounds (_cwi, _i, __FILE__, __LINE__,		\
				__FUNCTION__);				\
     &_cwi->u.hwiv.elem[_i]; }))

#define XCWINT(RTX, N, C) __extension__					\
(*({ __typeof (RTX) const _rtx = (RTX);					\
     if (GET_CODE (_rtx) != (C))					\
       rtl_check_failed_code1 (_rtx, (C), __FILE__, __LINE__,		\
			       __FUNCTION__);				\
     &_rtx->u.hwint[N]; }))

#define XCMWINT(RTX, N, C, M) __extension__				\
(*({ __typeof (RTX) const _rtx = (RTX);					\
     if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) != (M))		\
       rtl_check_failed_code_mode (_rtx, (C), (M), false, __FILE__,	\
				   __LINE__, __FUNCTION__);		\
     &_rtx->u.hwint[N]; }))

#define XCNMPRV(RTX, C, M) __extension__				\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M))		\
     rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__,	\
				 __LINE__, __FUNCTION__);		\
   &_rtx->u.rv; })

#define XCNMPFV(RTX, C, M) __extension__				\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M))		\
     rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__,	\
				 __LINE__, __FUNCTION__);		\
   &_rtx->u.fv; })

#define BLOCK_SYMBOL_CHECK(RTX) __extension__				\
({ __typeof (RTX) const _symbol = (RTX);				\
   const unsigned int flags = SYMBOL_REF_FLAGS (_symbol);		\
   if ((flags & SYMBOL_FLAG_HAS_BLOCK_INFO) == 0)			\
     rtl_check_failed_block_symbol (__FILE__, __LINE__,			\
				    __FUNCTION__);			\
   &_symbol->u.block_sym; })

#define HWIVEC_CHECK(RTX,C) __extension__				\
({ __typeof (RTX) const _symbol = (RTX);				\
   RTL_CHECKC1 (_symbol, 0, C);						\
   &_symbol->u.hwiv; })

extern void rtl_check_failed_bounds (const_rtx, int, const char *, int,
				     const char *)
    ATTRIBUTE_NORETURN;
extern void rtl_check_failed_type1 (const_rtx, int, int, const char *, int,
				    const char *)
    ATTRIBUTE_NORETURN;
extern void rtl_check_failed_type2 (const_rtx, int, int, int, const char *,
				    int, const char *)
    ATTRIBUTE_NORETURN;
extern void rtl_check_failed_code1 (const_rtx, enum rtx_code, const char *,
				    int, const char *)
    ATTRIBUTE_NORETURN;
extern void rtl_check_failed_code2 (const_rtx, enum rtx_code, enum rtx_code,
				    const char *, int, const char *)
    ATTRIBUTE_NORETURN;
extern void rtl_check_failed_code_mode (const_rtx, enum rtx_code, enum machine_mode,
					bool, const char *, int, const char *)
    ATTRIBUTE_NORETURN;
extern void rtl_check_failed_block_symbol (const char *, int, const char *)
    ATTRIBUTE_NORETURN;
extern void cwi_check_failed_bounds (const_rtx, int, const char *, int,
				     const char *)
    ATTRIBUTE_NORETURN;
extern void rtvec_check_failed_bounds (const_rtvec, int, const char *, int,
				       const char *)
    ATTRIBUTE_NORETURN;

#else   /* not ENABLE_RTL_CHECKING */

#define RTL_CHECK1(RTX, N, C1)      ((RTX)->u.fld[N])
#define RTL_CHECK2(RTX, N, C1, C2)  ((RTX)->u.fld[N])
#define RTL_CHECKC1(RTX, N, C)	    ((RTX)->u.fld[N])
#define RTL_CHECKC2(RTX, N, C1, C2) ((RTX)->u.fld[N])
#define RTVEC_ELT(RTVEC, I)	    ((RTVEC)->elem[I])
#define XWINT(RTX, N)		    ((RTX)->u.hwint[N])
#define CWI_ELT(RTX, I)		    ((RTX)->u.hwiv.elem[I])
#define XCWINT(RTX, N, C)	    ((RTX)->u.hwint[N])
#define XCMWINT(RTX, N, C, M)	    ((RTX)->u.hwint[N])
#define XCNMWINT(RTX, N, C, M)	    ((RTX)->u.hwint[N])
#define XCNMPRV(RTX, C, M)	    (&(RTX)->u.rv)
#define XCNMPFV(RTX, C, M)	    (&(RTX)->u.fv)
#define BLOCK_SYMBOL_CHECK(RTX)	    (&(RTX)->u.block_sym)
#define HWIVEC_CHECK(RTX,C)	    (&(RTX)->u.hwiv)

#endif

/* General accessor macros for accessing the flags of an rtx.  */

/* Access an individual rtx flag, with no checking of any kind.  */
#define RTX_FLAG(RTX, FLAG)	((RTX)->FLAG)

#if defined ENABLE_RTL_FLAG_CHECKING && (GCC_VERSION >= 2007)
#define RTL_FLAG_CHECK1(NAME, RTX, C1) __extension__			\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1)						\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK2(NAME, RTX, C1, C2) __extension__		\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE(_rtx) != C2)			\
     rtl_check_failed_flag  (NAME,_rtx, __FILE__, __LINE__,		\
			      __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK3(NAME, RTX, C1, C2, C3) __extension__		\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE(_rtx) != C2			\
       && GET_CODE (_rtx) != C3)					\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK4(NAME, RTX, C1, C2, C3, C4) __extension__	\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE(_rtx) != C2			\
       && GET_CODE (_rtx) != C3 && GET_CODE(_rtx) != C4)		\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			      __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK5(NAME, RTX, C1, C2, C3, C4, C5) __extension__	\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2			\
       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4		\
       && GET_CODE (_rtx) != C5)					\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6)		\
  __extension__								\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2			\
       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4		\
       && GET_CODE (_rtx) != C5 && GET_CODE (_rtx) != C6)		\
     rtl_check_failed_flag  (NAME,_rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7)		\
  __extension__								\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2			\
       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4		\
       && GET_CODE (_rtx) != C5 && GET_CODE (_rtx) != C6		\
       && GET_CODE (_rtx) != C7)					\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

#define RTL_INSN_CHAIN_FLAG_CHECK(NAME, RTX) 				\
  __extension__								\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (!INSN_CHAIN_CODE_P (GET_CODE (_rtx)))				\
     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,		\
			    __FUNCTION__);				\
   _rtx; })

extern void rtl_check_failed_flag (const char *, const_rtx, const char *,
				   int, const char *)
    ATTRIBUTE_NORETURN
    ;

#else	/* not ENABLE_RTL_FLAG_CHECKING */

#define RTL_FLAG_CHECK1(NAME, RTX, C1)					(RTX)
#define RTL_FLAG_CHECK2(NAME, RTX, C1, C2)				(RTX)
#define RTL_FLAG_CHECK3(NAME, RTX, C1, C2, C3)				(RTX)
#define RTL_FLAG_CHECK4(NAME, RTX, C1, C2, C3, C4)			(RTX)
#define RTL_FLAG_CHECK5(NAME, RTX, C1, C2, C3, C4, C5)			(RTX)
#define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6)		(RTX)
#define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7)		(RTX)
#define RTL_INSN_CHAIN_FLAG_CHECK(NAME, RTX) 				(RTX)
#endif

#define XINT(RTX, N)	(RTL_CHECK2 (RTX, N, 'i', 'n').rt_int)
#define XUINT(RTX, N)   (RTL_CHECK2 (RTX, N, 'i', 'n').rt_uint)
#define XSTR(RTX, N)	(RTL_CHECK2 (RTX, N, 's', 'S').rt_str)
#define XEXP(RTX, N)	(RTL_CHECK2 (RTX, N, 'e', 'u').rt_rtx)
#define XVEC(RTX, N)	(RTL_CHECK2 (RTX, N, 'E', 'V').rt_rtvec)
#define XMODE(RTX, N)	(RTL_CHECK1 (RTX, N, 'M').rt_type)
#define XTREE(RTX, N)   (RTL_CHECK1 (RTX, N, 't').rt_tree)
#define XBBDEF(RTX, N)	(RTL_CHECK1 (RTX, N, 'B').rt_bb)
#define XTMPL(RTX, N)	(RTL_CHECK1 (RTX, N, 'T').rt_str)
#define XCFI(RTX, N)	(RTL_CHECK1 (RTX, N, 'C').rt_cfi)

#define XVECEXP(RTX, N, M)	RTVEC_ELT (XVEC (RTX, N), M)
#define XVECLEN(RTX, N)		GET_NUM_ELEM (XVEC (RTX, N))
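
/* Editor's illustrative sketch, not part of the original header: scalar
   operands are reached with XEXP and 'E'/'V' vector operands with
   XVEC/XVECLEN/XVECEXP, according to the format string of the rtx code.  */
#if 0
static void
operand_access_example (rtx x)
{
  if (GET_CODE (x) == PLUS)
    {
      rtx op0 = XEXP (x, 0);	/* First operand of the addition.  */
      rtx op1 = XEXP (x, 1);	/* Second operand.  */
      (void) op0; (void) op1;
    }
  else if (GET_CODE (x) == PARALLEL)
    for (int i = 0; i < XVECLEN (x, 0); i++)
      {
	rtx elt = XVECEXP (x, 0, i);	/* I-th side-effect expression.  */
	(void) elt;
      }
}
#endif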

/* These are like XINT, etc. except that they expect a '0' field instead
   of the normal type code.  */

#define X0INT(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_int)
#define X0UINT(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_uint)
#define X0STR(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_str)
#define X0EXP(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_rtx)
#define X0VEC(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_rtvec)
#define X0MODE(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_type)
#define X0TREE(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_tree)
#define X0BBDEF(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_bb)
#define X0ADVFLAGS(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_addr_diff_vec_flags)
#define X0CSELIB(RTX, N)   (RTL_CHECK1 (RTX, N, '0').rt_cselib)
#define X0MEMATTR(RTX, N)  (RTL_CHECKC1 (RTX, N, MEM).rt_mem)
#define X0REGATTR(RTX, N)  (RTL_CHECKC1 (RTX, N, REG).rt_reg)
#define X0CONSTANT(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_constant)

/* Access a '0' field with any type.  */
#define X0ANY(RTX, N)	   RTL_CHECK1 (RTX, N, '0')

#define XCINT(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_int)
#define XCUINT(RTX, N, C)     (RTL_CHECKC1 (RTX, N, C).rt_uint)
#define XCSTR(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_str)
#define XCEXP(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_rtx)
#define XCVEC(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_rtvec)
#define XCMODE(RTX, N, C)     (RTL_CHECKC1 (RTX, N, C).rt_type)
#define XCTREE(RTX, N, C)     (RTL_CHECKC1 (RTX, N, C).rt_tree)
#define XCBBDEF(RTX, N, C)    (RTL_CHECKC1 (RTX, N, C).rt_bb)
#define XCCFI(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_cfi)
#define XCCSELIB(RTX, N, C)   (RTL_CHECKC1 (RTX, N, C).rt_cselib)

#define XCVECEXP(RTX, N, M, C)	RTVEC_ELT (XCVEC (RTX, N, C), M)
#define XCVECLEN(RTX, N, C)	GET_NUM_ELEM (XCVEC (RTX, N, C))

#define XC2EXP(RTX, N, C1, C2)      (RTL_CHECKC2 (RTX, N, C1, C2).rt_rtx)


/* Methods of rtx_expr_list.  */

inline rtx_expr_list *rtx_expr_list::next () const
{
  rtx tmp = XEXP (this, 1);
  return safe_as_a <rtx_expr_list *> (tmp);
}

inline rtx rtx_expr_list::element () const
{
  return XEXP (this, 0);
}

/* Methods of rtx_insn_list.  */

inline rtx_insn_list *rtx_insn_list::next () const
{
  rtx tmp = XEXP (this, 1);
  return safe_as_a <rtx_insn_list *> (tmp);
}

inline rtx_insn *rtx_insn_list::insn () const
{
  rtx tmp = XEXP (this, 0);
  return safe_as_a <rtx_insn *> (tmp);
}

/* Methods of rtx_sequence.  */

inline int rtx_sequence::len () const
{
  return XVECLEN (this, 0);
}

inline rtx rtx_sequence::element (int index) const
{
  return XVECEXP (this, 0, index);
}

inline rtx_insn *rtx_sequence::insn (int index) const
{
  return as_a <rtx_insn *> (XVECEXP (this, 0, index));
}
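
/* Editor's illustrative sketch, not part of the original header: a
   delay-slot SEQUENCE can be walked through the accessors above, e.g. after
   a checked conversion of an insn's PATTERN.  */
#if 0
static void
walk_sequence_example (rtx_insn *insn)
{
  if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
    for (int i = 0; i < seq->len (); i++)
      {
	rtx_insn *elem = seq->insn (i);	/* I-th insn in the sequence.  */
	(void) elem;
      }
}
#endif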

/* ACCESS MACROS for particular fields of insns.  */

/* Holds a unique number for each insn.
   These are not necessarily sequentially increasing.  */
inline int INSN_UID (const_rtx insn)
{
  return RTL_INSN_CHAIN_FLAG_CHECK ("INSN_UID",
				    (insn))->u2.insn_uid;
}
inline int& INSN_UID (rtx insn)
{
  return RTL_INSN_CHAIN_FLAG_CHECK ("INSN_UID",
				    (insn))->u2.insn_uid;
}

/* Chain insns together in sequence.  */

/* For now these are split in two: an rvalue form:
     PREV_INSN/NEXT_INSN
   and an lvalue form:
     SET_NEXT_INSN/SET_PREV_INSN.  */

inline rtx_insn *PREV_INSN (const rtx_insn *insn)
{
  rtx prev = XEXP (insn, 0);
  return safe_as_a <rtx_insn *> (prev);
}

inline rtx& SET_PREV_INSN (rtx_insn *insn)
{
  return XEXP (insn, 0);
}

inline rtx_insn *NEXT_INSN (const rtx_insn *insn)
{
  rtx next = XEXP (insn, 1);
  return safe_as_a <rtx_insn *> (next);
}

inline rtx& SET_NEXT_INSN (rtx_insn *insn)
{
  return XEXP (insn, 1);
}
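
/* Editor's illustrative sketch, not part of the original header: the usual
   way to walk a function's instruction chain is to follow NEXT_INSN until it
   returns NULL, starting from the head of the chain (e.g. the result of
   get_insns ()).  */
#if 0
static void
walk_insn_chain_example (rtx_insn *first)
{
  for (rtx_insn *insn = first; insn != NULL; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      {
	/* Look at the body of the instruction here.  */
      }
}
#endif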

inline basic_block BLOCK_FOR_INSN (const_rtx insn)
{
  return XBBDEF (insn, 2);
}

inline basic_block& BLOCK_FOR_INSN (rtx insn)
{
  return XBBDEF (insn, 2);
}

inline void set_block_for_insn (rtx_insn *insn, basic_block bb)
{
  BLOCK_FOR_INSN (insn) = bb;
}

/* The body of an insn.  */
inline rtx PATTERN (const_rtx insn)
{
  return XEXP (insn, 3);
}

inline rtx& PATTERN (rtx insn)
{
  return XEXP (insn, 3);
}

inline unsigned int INSN_LOCATION (const rtx_insn *insn)
{
  return XUINT (insn, 4);
}

inline unsigned int& INSN_LOCATION (rtx_insn *insn)
{
  return XUINT (insn, 4);
}

inline bool INSN_HAS_LOCATION (const rtx_insn *insn)
{
  return LOCATION_LOCUS (INSN_LOCATION (insn)) != UNKNOWN_LOCATION;
}

/* LOCATION of an RTX if relevant.  */
#define RTL_LOCATION(X) (INSN_P (X) ? \
			 INSN_LOCATION (as_a <rtx_insn *> (X)) \
			 : UNKNOWN_LOCATION)

/* Code number of instruction, from when it was recognized.
   -1 means this instruction has not been recognized yet.  */
#define INSN_CODE(INSN) XINT (INSN, 5)

inline rtvec rtx_jump_table_data::get_labels () const
{
  rtx pat = PATTERN (this);
  if (GET_CODE (pat) == ADDR_VEC)
    return XVEC (pat, 0);
  else
    return XVEC (pat, 1); /* presumably an ADDR_DIFF_VEC */
}

#define RTX_FRAME_RELATED_P(RTX)					\
  (RTL_FLAG_CHECK6 ("RTX_FRAME_RELATED_P", (RTX), DEBUG_INSN, INSN,	\
		    CALL_INSN, JUMP_INSN, BARRIER, SET)->frame_related)

/* 1 if JUMP RTX is a crossing jump.  */
#define CROSSING_JUMP_P(RTX) \
  (RTL_FLAG_CHECK1 ("CROSSING_JUMP_P", (RTX), JUMP_INSN)->jump)

/* 1 if RTX is a call to a const function.  Built from ECF_CONST and
   TREE_READONLY.  */
#define RTL_CONST_CALL_P(RTX)					\
  (RTL_FLAG_CHECK1 ("RTL_CONST_CALL_P", (RTX), CALL_INSN)->unchanging)

/* 1 if RTX is a call to a pure function.  Built from ECF_PURE and
   DECL_PURE_P.  */
#define RTL_PURE_CALL_P(RTX)					\
  (RTL_FLAG_CHECK1 ("RTL_PURE_CALL_P", (RTX), CALL_INSN)->return_val)

/* 1 if RTX is a call to a const or pure function.  */
#define RTL_CONST_OR_PURE_CALL_P(RTX) \
  (RTL_CONST_CALL_P (RTX) || RTL_PURE_CALL_P (RTX))

/* 1 if RTX is a call to a looping const or pure function.  Built from
   ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P.  */
#define RTL_LOOPING_CONST_OR_PURE_CALL_P(RTX)				\
  (RTL_FLAG_CHECK1 ("CONST_OR_PURE_CALL_P", (RTX), CALL_INSN)->call)

/* 1 if RTX is a call_insn for a sibling call.  */
#define SIBLING_CALL_P(RTX)						\
  (RTL_FLAG_CHECK1 ("SIBLING_CALL_P", (RTX), CALL_INSN)->jump)

/* 1 if RTX is a jump_insn, call_insn, or insn that is an annulling branch.  */
#define INSN_ANNULLED_BRANCH_P(RTX)					\
  (RTL_FLAG_CHECK1 ("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN)->unchanging)

/* 1 if RTX is an insn in a delay slot and is from the target of the branch.
   If the branch insn has INSN_ANNULLED_BRANCH_P set, this insn should only be
   executed if the branch is taken.  For annulled branches with this bit
   clear, the insn should be executed only if the branch is not taken.  */
#define INSN_FROM_TARGET_P(RTX)						\
  (RTL_FLAG_CHECK3 ("INSN_FROM_TARGET_P", (RTX), INSN, JUMP_INSN, \
		    CALL_INSN)->in_struct)

/* In an ADDR_DIFF_VEC, the flags for RTX for use by branch shortening.
   See the comments for ADDR_DIFF_VEC in rtl.def.  */
#define ADDR_DIFF_VEC_FLAGS(RTX) X0ADVFLAGS (RTX, 4)

/* In a VALUE, the value cselib has assigned to RTX.
   This is a "struct cselib_val", see cselib.h.  */
#define CSELIB_VAL_PTR(RTX) X0CSELIB (RTX, 0)

/* Holds a list of notes on what this insn does to various REGs.
   It is a chain of EXPR_LIST rtx's, where the second operand is the
   chain pointer and the first operand is the REG being described.
   The mode field of the EXPR_LIST contains not a real machine mode
   but a value from enum reg_note.  */
#define REG_NOTES(INSN)	XEXP(INSN, 6)

/* In an ENTRY_VALUE this is the DECL_INCOMING_RTL of the argument in
   question.  */
#define ENTRY_VALUE_EXP(RTX) (RTL_CHECKC1 (RTX, 0, ENTRY_VALUE).rt_rtx)

enum reg_note
{
#define DEF_REG_NOTE(NAME) NAME,
#include "reg-notes.def"
#undef DEF_REG_NOTE
  REG_NOTE_MAX
};

/* Define macros to extract and insert the reg-note kind in an EXPR_LIST.  */
#define REG_NOTE_KIND(LINK) ((enum reg_note) GET_MODE (LINK))
#define PUT_REG_NOTE_KIND(LINK, KIND) \
  PUT_MODE (LINK, (enum machine_mode) (KIND))

/* Names for REG_NOTE's in EXPR_LIST insn's.  */

extern const char * const reg_note_name[];
Mike Stump committed
1496
#define GET_REG_NOTE_NAME(MODE) (reg_note_name[(int) (MODE)])
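
/* Illustrative sketch, not part of this interface: the REG_NOTES chain can
   be walked directly with the accessors above.  Assuming INSN is an insn
   and REG a REG rtx already in scope:

     for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
       if (REG_NOTE_KIND (link) == REG_DEAD
	   && rtx_equal_p (XEXP (link, 0), reg))
	 break;

   find_reg_note and find_regno_note (declared later in this file) wrap
   this kind of loop.  */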

/* This field is only present on CALL_INSNs.  It holds a chain of EXPR_LIST of
   USE and CLOBBER expressions.
     USE expressions list the registers filled with arguments that
   are passed to the function.
     CLOBBER expressions document the registers explicitly clobbered
   by this CALL_INSN.
     Pseudo registers can not be mentioned in this list.  */
#define CALL_INSN_FUNCTION_USAGE(INSN)	XEXP(INSN, 7)

/* The label-number of a code-label.  The assembler label
   is made from `L' and the label-number printed in decimal.
   Label numbers are unique in a compilation.  */
#define CODE_LABEL_NUMBER(INSN)	XINT (INSN, 5)

/* In a NOTE that is a line number, this is a string for the file name that the
   line is in.  We use the same field to record block numbers temporarily in
   NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes.  (We avoid lots of casts
   between ints and pointers if we use a different macro for the block number.)
   */

/* Opaque data.  */
#define NOTE_DATA(INSN)	        RTL_CHECKC1 (INSN, 3, NOTE)
#define NOTE_DELETED_LABEL_NAME(INSN) XCSTR (INSN, 3, NOTE)
#define SET_INSN_DELETED(INSN) set_insn_deleted (INSN);
#define NOTE_BLOCK(INSN)	XCTREE (INSN, 3, NOTE)
#define NOTE_EH_HANDLER(INSN)	XCINT (INSN, 3, NOTE)
#define NOTE_BASIC_BLOCK(INSN)	XCBBDEF (INSN, 3, NOTE)
#define NOTE_VAR_LOCATION(INSN)	XCEXP (INSN, 3, NOTE)
#define NOTE_CFI(INSN)		XCCFI (INSN, 3, NOTE)
#define NOTE_LABEL_NUMBER(INSN)	XCINT (INSN, 3, NOTE)

/* In a NOTE that is a line number, this is the line number.
   Other kinds of NOTEs are identified by negative numbers here.  */
#define NOTE_KIND(INSN) XCINT (INSN, 4, NOTE)

/* Nonzero if INSN is a note marking the beginning of a basic block.  */
#define NOTE_INSN_BASIC_BLOCK_P(INSN) \
  (NOTE_P (INSN) && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK)

/* Variable declaration and the location of a variable.  */
#define PAT_VAR_LOCATION_DECL(PAT) (XCTREE ((PAT), 0, VAR_LOCATION))
#define PAT_VAR_LOCATION_LOC(PAT) (XCEXP ((PAT), 1, VAR_LOCATION))

/* Initialization status of the variable in the location.  Status
   can be unknown, uninitialized or initialized.  See enumeration
   type below.  */
#define PAT_VAR_LOCATION_STATUS(PAT) \
  (RTL_FLAG_CHECK1 ("PAT_VAR_LOCATION_STATUS", PAT, VAR_LOCATION) \
   ->u2.var_location_status)

/* Accessors for a NOTE_INSN_VAR_LOCATION.  */
#define NOTE_VAR_LOCATION_DECL(NOTE) \
  PAT_VAR_LOCATION_DECL (NOTE_VAR_LOCATION (NOTE))
#define NOTE_VAR_LOCATION_LOC(NOTE) \
  PAT_VAR_LOCATION_LOC (NOTE_VAR_LOCATION (NOTE))
#define NOTE_VAR_LOCATION_STATUS(NOTE) \
  PAT_VAR_LOCATION_STATUS (NOTE_VAR_LOCATION (NOTE))

/* The VAR_LOCATION rtx in a DEBUG_INSN.  */
#define INSN_VAR_LOCATION(INSN) PATTERN (INSN)

/* Accessors for a tree-expanded var location debug insn.  */
#define INSN_VAR_LOCATION_DECL(INSN) \
  PAT_VAR_LOCATION_DECL (INSN_VAR_LOCATION (INSN))
#define INSN_VAR_LOCATION_LOC(INSN) \
  PAT_VAR_LOCATION_LOC (INSN_VAR_LOCATION (INSN))
#define INSN_VAR_LOCATION_STATUS(INSN) \
  PAT_VAR_LOCATION_STATUS (INSN_VAR_LOCATION (INSN))

/* Expand to the RTL that denotes an unknown variable location in a
   DEBUG_INSN.  */
#define gen_rtx_UNKNOWN_VAR_LOC() (gen_rtx_CLOBBER (VOIDmode, const0_rtx))

/* Determine whether X is such an unknown location.  */
#define VAR_LOC_UNKNOWN_P(X) \
  (GET_CODE (X) == CLOBBER && XEXP ((X), 0) == const0_rtx)
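
/* Illustrative sketch, not part of this interface: for a DEBUG_INSN INSN,
   the accessors above expose the variable being described and the location
   it is bound to, which may be the "unknown" marker:

     tree decl = INSN_VAR_LOCATION_DECL (insn);
     rtx loc = INSN_VAR_LOCATION_LOC (insn);
     if (VAR_LOC_UNKNOWN_P (loc))
       decl = NULL_TREE;

   INSN is assumed to satisfy DEBUG_INSN_P; the assignment above is just a
   placeholder for whatever the caller does with an untracked value.  */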

/* 1 if RTX is emitted after a call, but it should take effect before
   the call returns.  */
#define NOTE_DURING_CALL_P(RTX)				\
  (RTL_FLAG_CHECK1 ("NOTE_VAR_LOCATION_DURING_CALL_P", (RTX), NOTE)->call)

/* DEBUG_EXPR_DECL corresponding to a DEBUG_EXPR RTX.  */
#define DEBUG_EXPR_TREE_DECL(RTX) XCTREE (RTX, 0, DEBUG_EXPR)

/* VAR_DECL/PARM_DECL DEBUG_IMPLICIT_PTR takes address of.  */
#define DEBUG_IMPLICIT_PTR_DECL(RTX) XCTREE (RTX, 0, DEBUG_IMPLICIT_PTR)

/* PARM_DECL DEBUG_PARAMETER_REF references.  */
#define DEBUG_PARAMETER_REF_DECL(RTX) XCTREE (RTX, 0, DEBUG_PARAMETER_REF)

/* Codes that appear in the NOTE_KIND field for kinds of notes
   that are not line numbers.  These codes are all negative.

   Notice that we do not try to use zero here for any of
   the special note codes because sometimes the source line
   actually can be zero!  This happens (for example) when we
   are generating code for the per-translation-unit constructor
   and destructor routines for some C++ translation unit.  */

enum insn_note
{
#define DEF_INSN_NOTE(NAME) NAME,
#include "insn-notes.def"
#undef DEF_INSN_NOTE

  NOTE_INSN_MAX
};

/* Names for NOTE insn's other than line numbers.  */

extern const char * const note_insn_name[NOTE_INSN_MAX];
#define GET_NOTE_INSN_NAME(NOTE_CODE) \
  (note_insn_name[(NOTE_CODE)])

/* The name of a label, in case it corresponds to an explicit label
   in the input source code.  */
#define LABEL_NAME(RTX) XCSTR (RTX, 6, CODE_LABEL)

/* In jump.c, each label contains a count of the number
   of LABEL_REFs that point at it, so unused labels can be deleted.  */
#define LABEL_NUSES(RTX) XCINT (RTX, 4, CODE_LABEL)

/* Labels carry a two-bit field composed of the ->jump and ->call
   bits.  This field indicates whether the label is an alternate
   entry point, and if so, what kind.  */
enum label_kind
{
  LABEL_NORMAL = 0,	/* ordinary label */
  LABEL_STATIC_ENTRY,	/* alternate entry point, not exported */
  LABEL_GLOBAL_ENTRY,	/* alternate entry point, exported */
  LABEL_WEAK_ENTRY	/* alternate entry point, exported as weak symbol */
};

#if defined ENABLE_RTL_FLAG_CHECKING && (GCC_VERSION > 2007)

/* Retrieve the kind of LABEL.  */
#define LABEL_KIND(LABEL) __extension__					\
({ __typeof (LABEL) const _label = (LABEL);				\
   if (! LABEL_P (_label))						\
     rtl_check_failed_flag ("LABEL_KIND", _label, __FILE__, __LINE__,	\
			    __FUNCTION__);				\
   (enum label_kind) ((_label->jump << 1) | _label->call); })

/* Set the kind of LABEL.  */
#define SET_LABEL_KIND(LABEL, KIND) do {				\
   __typeof (LABEL) const _label = (LABEL);				\
   const unsigned int _kind = (KIND);					\
   if (! LABEL_P (_label))						\
     rtl_check_failed_flag ("SET_LABEL_KIND", _label, __FILE__, __LINE__, \
			    __FUNCTION__);				\
   _label->jump = ((_kind >> 1) & 1);					\
   _label->call = (_kind & 1);						\
} while (0)

#else

/* Retrieve the kind of LABEL.  */
#define LABEL_KIND(LABEL) \
   ((enum label_kind) (((LABEL)->jump << 1) | (LABEL)->call))

/* Set the kind of LABEL.  */
#define SET_LABEL_KIND(LABEL, KIND) do {				\
   rtx const _label = (LABEL);						\
   const unsigned int _kind = (KIND);					\
   _label->jump = ((_kind >> 1) & 1);					\
   _label->call = (_kind & 1);						\
} while (0)

#endif /* rtl flag checking */

#define LABEL_ALT_ENTRY_P(LABEL) (LABEL_KIND (LABEL) != LABEL_NORMAL)
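
/* Illustrative sketch, not part of this interface: a back end that emits an
   extra, externally visible entry point might mark and test a label so:

     SET_LABEL_KIND (label, LABEL_GLOBAL_ENTRY);
     if (LABEL_ALT_ENTRY_P (label))
       output_alternate_entry_point_somehow ();

   LABEL is assumed to satisfy LABEL_P; the output call is a hypothetical
   placeholder, not an existing function.  */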

/* In jump.c, each JUMP_INSN can point to a label that it can jump to,
   so that if the JUMP_INSN is deleted, the label's LABEL_NUSES can
   be decremented and possibly the label can be deleted.  */
#define JUMP_LABEL(INSN)   XCEXP (INSN, 7, JUMP_INSN)

inline rtx_insn *JUMP_LABEL_AS_INSN (const rtx_insn *insn)
{
  return safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
}

/* Once basic blocks are found, each CODE_LABEL starts a chain that
   goes through all the LABEL_REFs that jump to that label.  The chain
   eventually winds up at the CODE_LABEL: it is circular.  */
#define LABEL_REFS(LABEL) XCEXP (LABEL, 3, CODE_LABEL)

/* Get the label that a LABEL_REF references.  */
#define LABEL_REF_LABEL(LABREF) XCEXP (LABREF, 0, LABEL_REF)


/* For a REG rtx, REGNO extracts the register number.  REGNO can only
   be used on RHS.  Use SET_REGNO to change the value.  */
#define REGNO(RTX) (rhs_regno(RTX))
#define SET_REGNO(RTX,N) \
  (df_ref_change_reg_with_loc (REGNO (RTX), N, RTX), XCUINT (RTX, 0, REG) = N)
#define SET_REGNO_RAW(RTX,N) (XCUINT (RTX, 0, REG) = N)

/* ORIGINAL_REGNO holds the number the register originally had; for a
   pseudo register turned into a hard reg this will hold the old pseudo
   register number.  */
#define ORIGINAL_REGNO(RTX) \
  (RTL_FLAG_CHECK1 ("ORIGINAL_REGNO", (RTX), REG)->u2.original_regno)

/* Force the REGNO macro to only be usable as an rvalue (i.e. on the rhs);
   use SET_REGNO to change the register number.  */
static inline unsigned int
rhs_regno (const_rtx x)
{
  return XCUINT (x, 0, REG);
}


/* 1 if RTX is a reg or parallel that is the current function's return
   value.  */
#define REG_FUNCTION_VALUE_P(RTX)					\
  (RTL_FLAG_CHECK2 ("REG_FUNCTION_VALUE_P", (RTX), REG, PARALLEL)->return_val)

/* 1 if RTX is a reg that corresponds to a variable declared by the user.  */
#define REG_USERVAR_P(RTX)						\
  (RTL_FLAG_CHECK1 ("REG_USERVAR_P", (RTX), REG)->volatil)

/* 1 if RTX is a reg that holds a pointer value.  */
#define REG_POINTER(RTX)						\
  (RTL_FLAG_CHECK1 ("REG_POINTER", (RTX), REG)->frame_related)

/* 1 if RTX is a mem that holds a pointer value.  */
#define MEM_POINTER(RTX)						\
  (RTL_FLAG_CHECK1 ("MEM_POINTER", (RTX), MEM)->frame_related)

/* 1 if the given register REG corresponds to a hard register.  */
#define HARD_REGISTER_P(REG) (HARD_REGISTER_NUM_P (REGNO (REG)))

/* 1 if the given register number REG_NO corresponds to a hard register.  */
#define HARD_REGISTER_NUM_P(REG_NO) ((REG_NO) < FIRST_PSEUDO_REGISTER)

/* For a CONST_INT rtx, INTVAL extracts the integer.  */
#define INTVAL(RTX) XCWINT (RTX, 0, CONST_INT)
#define UINTVAL(RTX) ((unsigned HOST_WIDE_INT) INTVAL (RTX))

/* For a CONST_WIDE_INT, CONST_WIDE_INT_NUNITS is the number of
   elements actually needed to represent the constant.
   CONST_WIDE_INT_ELT gets one of the elements.  0 is the least
   significant HOST_WIDE_INT.  */
#define CONST_WIDE_INT_VEC(RTX) HWIVEC_CHECK (RTX, CONST_WIDE_INT)
#define CONST_WIDE_INT_NUNITS(RTX) CWI_GET_NUM_ELEM (RTX)
#define CONST_WIDE_INT_ELT(RTX, N) CWI_ELT (RTX, N)

/* For a CONST_DOUBLE:
#if TARGET_SUPPORTS_WIDE_INT == 0
   For a VOIDmode, there are two integers: CONST_DOUBLE_LOW is the
     low-order word and ..._HIGH the high-order.
#endif
   For a float, there is a REAL_VALUE_TYPE structure, and
     CONST_DOUBLE_REAL_VALUE(r) is a pointer to it.  */
#define CONST_DOUBLE_LOW(r) XCMWINT (r, 0, CONST_DOUBLE, VOIDmode)
#define CONST_DOUBLE_HIGH(r) XCMWINT (r, 1, CONST_DOUBLE, VOIDmode)
#define CONST_DOUBLE_REAL_VALUE(r) \
  ((const struct real_value *) XCNMPRV (r, CONST_DOUBLE, VOIDmode))

#define CONST_FIXED_VALUE(r) \
  ((const struct fixed_value *) XCNMPFV (r, CONST_FIXED, VOIDmode))
#define CONST_FIXED_VALUE_HIGH(r) \
  ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.high))
#define CONST_FIXED_VALUE_LOW(r) \
  ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.low))

/* For a CONST_VECTOR, return element #n.  */
#define CONST_VECTOR_ELT(RTX, N) XCVECEXP (RTX, 0, N, CONST_VECTOR)

/* For a CONST_VECTOR, return the number of elements in a vector.  */
#define CONST_VECTOR_NUNITS(RTX) XCVECLEN (RTX, 0, CONST_VECTOR)

/* For a SUBREG rtx, SUBREG_REG extracts the value we want a subreg of.
   SUBREG_BYTE extracts the byte-number.  */

#define SUBREG_REG(RTX) XCEXP (RTX, 0, SUBREG)
#define SUBREG_BYTE(RTX) XCUINT (RTX, 1, SUBREG)

/* in rtlanal.c */
/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */
#define COSTS_N_INSNS(N) ((N) * 4)

/* Maximum cost of an rtl expression.  This value has the special meaning
   not to use an rtx with this cost under any circumstances.  */
#define MAX_COST INT_MAX

/* A structure to hold all available cost information about an rtl
   expression.  */
struct full_rtx_costs
{
  int speed;
  int size;
};

/* Initialize a full_rtx_costs structure C to the maximum cost.  */
static inline void
init_costs_to_max (struct full_rtx_costs *c)
{
  c->speed = MAX_COST;
  c->size = MAX_COST;
}

/* Initialize a full_rtx_costs structure C to zero cost.  */
static inline void
init_costs_to_zero (struct full_rtx_costs *c)
{
  c->speed = 0;
  c->size = 0;
}

/* Compare two full_rtx_costs structures A and B, returning true
   if A < B when optimizing for speed.  */
static inline bool
costs_lt_p (struct full_rtx_costs *a, struct full_rtx_costs *b,
	    bool speed)
{
  if (speed)
    return (a->speed < b->speed
	    || (a->speed == b->speed && a->size < b->size));
  else
    return (a->size < b->size
	    || (a->size == b->size && a->speed < b->speed));
}

/* Increase both members of the full_rtx_costs structure C by the
   cost of N insns.  */
static inline void
costs_add_n_insns (struct full_rtx_costs *c, int n)
{
  c->speed += COSTS_N_INSNS (n);
  c->size += COSTS_N_INSNS (n);
}
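
/* Illustrative sketch, not part of this interface: a pass weighing two
   candidate sequences might accumulate and compare costs like this:

     struct full_rtx_costs old_cost, new_cost;
     init_costs_to_zero (&old_cost);
     init_costs_to_zero (&new_cost);
     costs_add_n_insns (&old_cost, 3);
     costs_add_n_insns (&new_cost, 2);
     if (costs_lt_p (&new_cost, &old_cost, for_speed))
       use_the_new_sequence ();

   FOR_SPEED and the final call are assumptions standing in for whatever
   the caller actually does.  */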

/* Describes the shape of a subreg:

   inner_mode == the mode of the SUBREG_REG
   offset     == the SUBREG_BYTE
   outer_mode == the mode of the SUBREG itself.  */
struct subreg_shape {
  subreg_shape (enum machine_mode, unsigned int, enum machine_mode);
  bool operator == (const subreg_shape &) const;
  bool operator != (const subreg_shape &) const;
  unsigned int unique_id () const;

  enum machine_mode inner_mode;
  unsigned int offset;
  enum machine_mode outer_mode;
};

inline
subreg_shape::subreg_shape (enum machine_mode inner_mode_in,
			    unsigned int offset_in,
			    enum machine_mode outer_mode_in)
  : inner_mode (inner_mode_in), offset (offset_in), outer_mode (outer_mode_in)
{}

inline bool
subreg_shape::operator == (const subreg_shape &other) const
{
  return (inner_mode == other.inner_mode
	  && offset == other.offset
	  && outer_mode == other.outer_mode);
}

inline bool
subreg_shape::operator != (const subreg_shape &other) const
{
  return !operator == (other);
}

/* Return an integer that uniquely identifies this shape.  Structures
   like rtx_def assume that a mode can fit in an 8-bit bitfield and no
   current mode is anywhere near being 65536 bytes in size, so the
   id comfortably fits in an int.  */

inline unsigned int
subreg_shape::unique_id () const
{
  STATIC_ASSERT (MAX_MACHINE_MODE <= 256);
  return (int) inner_mode + ((int) outer_mode << 8) + (offset << 16);
}

/* Return the shape of a SUBREG rtx.  */

static inline subreg_shape
shape_of_subreg (const_rtx x)
{
  return subreg_shape (GET_MODE (SUBREG_REG (x)),
		       SUBREG_BYTE (x), GET_MODE (x));
}
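
/* Illustrative sketch, not part of this interface: the shape of a SUBREG
   can be captured, compared, or reduced to a compact integer key:

     subreg_shape shape = shape_of_subreg (x);
     if (shape == shape_of_subreg (y))
       record_shape (shape.unique_id ());

   X and Y are assumed to be SUBREG rtxes and record_shape a hypothetical
   consumer of the id.  */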

/* Information about an address.  This structure is supposed to be able
   to represent all supported target addresses.  Please extend it if it
   is not yet general enough.  */
struct address_info {
  /* The mode of the value being addressed, or VOIDmode if this is
     a load-address operation with no known address mode.  */
  enum machine_mode mode;

  /* The address space.  */
  addr_space_t as;

  /* A pointer to the top-level address.  */
  rtx *outer;

  /* A pointer to the inner address, after all address mutations
     have been stripped from the top-level address.  It can be one
     of the following:

     - A {PRE,POST}_{INC,DEC} of *BASE.  SEGMENT, INDEX and DISP are null.

     - A {PRE,POST}_MODIFY of *BASE.  In this case either INDEX or DISP
       points to the step value, depending on whether the step is variable
       or constant respectively.  SEGMENT is null.

     - A plain sum of the form SEGMENT + BASE + INDEX + DISP,
       with null fields evaluating to 0.  */
  rtx *inner;

  /* Components that make up *INNER.  Each one may be null or nonnull.
     When nonnull, their meanings are as follows:

     - *SEGMENT is the "segment" of memory to which the address refers.
       This value is entirely target-specific and is only called a "segment"
       because that's its most typical use.  It contains exactly one UNSPEC,
       pointed to by SEGMENT_TERM.  The contents of *SEGMENT do not need
       reloading.

     - *BASE is a variable expression representing a base address.
       It contains exactly one REG, SUBREG or MEM, pointed to by BASE_TERM.

     - *INDEX is a variable expression representing an index value.
       It may be a scaled expression, such as a MULT.  It has exactly
       one REG, SUBREG or MEM, pointed to by INDEX_TERM.

     - *DISP is a constant, possibly mutated.  DISP_TERM points to the
       unmutated RTX_CONST_OBJ.  */
  rtx *segment;
  rtx *base;
  rtx *index;
  rtx *disp;

  rtx *segment_term;
  rtx *base_term;
  rtx *index_term;
  rtx *disp_term;

  /* In a {PRE,POST}_MODIFY address, this points to a second copy
     of BASE_TERM, otherwise it is null.  */
  rtx *base_term2;

  /* ADDRESS if this structure describes an address operand, MEM if
     it describes a MEM address.  */
  enum rtx_code addr_outer_code;

  /* If BASE is nonnull, this is the code of the rtx that contains it.  */
  enum rtx_code base_outer_code;

  /* True if this is an RTX_AUTOINC address.  */
  bool autoinc_p;
};
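
/* Illustrative sketch, not part of this interface: an address_info is
   normally filled in by the decompose_* routines declared further down,
   e.g. for the address of a MEM rtx X:

     struct address_info info;
     decompose_mem_address (&info, x);
     if (info.base_term && REG_P (*info.base_term))
       note_base_register (REGNO (*info.base_term));

   The segment/base/index/disp pointers are null when the corresponding
   component is absent; note_base_register is a hypothetical consumer.  */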

/* This is used to bundle an rtx and a mode together so that the pair
   can be used with the wi:: routines.  If we ever put modes into rtx
   integer constants, this should go away and then just pass an rtx in.  */
typedef std::pair <rtx, enum machine_mode> rtx_mode_t;

namespace wi
{
  template <>
  struct int_traits <rtx_mode_t>
  {
    static const enum precision_type precision_type = VAR_PRECISION;
    static const bool host_dependent_precision = false;
    /* This ought to be true, except for the special case that BImode
       is canonicalized to STORE_FLAG_VALUE, which might be 1.  */
    static const bool is_sign_extended = false;
    static unsigned int get_precision (const rtx_mode_t &);
    static wi::storage_ref decompose (HOST_WIDE_INT *, unsigned int,
				      const rtx_mode_t &);
  };
}

inline unsigned int
wi::int_traits <rtx_mode_t>::get_precision (const rtx_mode_t &x)
{
  return GET_MODE_PRECISION (x.second);
}

inline wi::storage_ref
wi::int_traits <rtx_mode_t>::decompose (HOST_WIDE_INT *,
					unsigned int precision,
					const rtx_mode_t &x)
{
  gcc_checking_assert (precision == get_precision (x));
  switch (GET_CODE (x.first))
    {
    case CONST_INT:
      if (precision < HOST_BITS_PER_WIDE_INT)
	/* Nonzero BImodes are stored as STORE_FLAG_VALUE, which on many
	   targets is 1 rather than -1.  */
	gcc_checking_assert (INTVAL (x.first)
			     == sext_hwi (INTVAL (x.first), precision)
			     || (x.second == BImode && INTVAL (x.first) == 1));

      return wi::storage_ref (&INTVAL (x.first), 1, precision);

    case CONST_WIDE_INT:
      return wi::storage_ref (&CONST_WIDE_INT_ELT (x.first, 0),
			      CONST_WIDE_INT_NUNITS (x.first), precision);

#if TARGET_SUPPORTS_WIDE_INT == 0
    case CONST_DOUBLE:
      return wi::storage_ref (&CONST_DOUBLE_LOW (x.first), 2, precision);
#endif

    default:
      gcc_unreachable ();
    }
}

namespace wi
{
  hwi_with_prec shwi (HOST_WIDE_INT, enum machine_mode mode);
  wide_int min_value (enum machine_mode, signop);
  wide_int max_value (enum machine_mode, signop);
}

inline wi::hwi_with_prec
wi::shwi (HOST_WIDE_INT val, enum machine_mode mode)
{
  return shwi (val, GET_MODE_PRECISION (mode));
}

/* Produce the smallest number that is represented in MODE.  The precision
   is taken from MODE and the sign from SGN.  */
inline wide_int
wi::min_value (enum machine_mode mode, signop sgn)
{
  return min_value (GET_MODE_PRECISION (mode), sgn);
}

/* Produce the largest number that is represented in MODE.  The precision
   is taken from MODE and the sign from SGN.  */
inline wide_int
wi::max_value (enum machine_mode mode, signop sgn)
{
  return max_value (GET_MODE_PRECISION (mode), sgn);
}
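
/* Illustrative sketch, not part of this interface: an rtx constant paired
   with its mode can be fed to the wi:: routines and the result turned back
   into an rtx:

     wide_int sum = wi::add (rtx_mode_t (x, mode), rtx_mode_t (y, mode));
     rtx res = immed_wide_int_const (sum, mode);

   X and Y are assumed to be integer constant rtxes of MODE;
   immed_wide_int_const is declared later in this file.  */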

extern void init_rtlanal (void);
extern int rtx_cost (rtx, enum rtx_code, int, bool);
extern int address_cost (rtx, enum machine_mode, addr_space_t, bool);
extern void get_full_rtx_cost (rtx, enum rtx_code, int,
			       struct full_rtx_costs *);
extern unsigned int subreg_lsb (const_rtx);
extern unsigned int subreg_lsb_1 (enum machine_mode, enum machine_mode,
				  unsigned int);
extern unsigned int subreg_regno_offset	(unsigned int, enum machine_mode,
					 unsigned int, enum machine_mode);
extern bool subreg_offset_representable_p (unsigned int, enum machine_mode,
					   unsigned int, enum machine_mode);
extern unsigned int subreg_regno (const_rtx);
extern int simplify_subreg_regno (unsigned int, enum machine_mode,
				  unsigned int, enum machine_mode);
extern unsigned int subreg_nregs (const_rtx);
extern unsigned int subreg_nregs_with_regno (unsigned int, const_rtx);
extern unsigned HOST_WIDE_INT nonzero_bits (const_rtx, enum machine_mode);
extern unsigned int num_sign_bit_copies (const_rtx, enum machine_mode);
extern bool constant_pool_constant_p (rtx);
extern bool truncated_to_mode (enum machine_mode, const_rtx);
extern int low_bitmask_len (enum machine_mode, unsigned HOST_WIDE_INT);
extern void split_double (rtx, rtx *, rtx *);
extern rtx *strip_address_mutations (rtx *, enum rtx_code * = 0);
extern void decompose_address (struct address_info *, rtx *,
			       enum machine_mode, addr_space_t, enum rtx_code);
extern void decompose_lea_address (struct address_info *, rtx *);
extern void decompose_mem_address (struct address_info *, rtx);
extern void update_address (struct address_info *);
extern HOST_WIDE_INT get_index_scale (const struct address_info *);
extern enum rtx_code get_index_code (const struct address_info *);

#ifndef GENERATOR_FILE
/* Return the cost of SET X.  SPEED_P is true if optimizing for speed
   rather than size.  */

static inline int
set_rtx_cost (rtx x, bool speed_p)
{
  return rtx_cost (x, INSN, 4, speed_p);
}

/* Like set_rtx_cost, but return both the speed and size costs in C.  */

static inline void
get_full_set_rtx_cost (rtx x, struct full_rtx_costs *c)
{
  get_full_rtx_cost (x, INSN, 4, c);
}

/* Return the cost of moving X into a register, relative to the cost
   of a register move.  SPEED_P is true if optimizing for speed rather
   than size.  */

static inline int
set_src_cost (rtx x, bool speed_p)
{
  return rtx_cost (x, SET, 1, speed_p);
}

/* Like set_src_cost, but return both the speed and size costs in C.  */

static inline void
get_full_set_src_cost (rtx x, struct full_rtx_costs *c)
{
  get_full_rtx_cost (x, SET, 1, c);
}
#endif
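
/* Illustrative sketch, not part of this interface: these helpers are
   typically used to keep the cheaper of two equivalent expressions:

     if (set_src_cost (new_src, speed_p) < set_src_cost (old_src, speed_p))
       use_new_src ();

   NEW_SRC, OLD_SRC, SPEED_P and the final call are assumptions provided
   by the caller.  */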

/* 1 if RTX is a subreg containing a reg that is already known to be
   sign- or zero-extended from the mode of the subreg to the mode of
   the reg.  SUBREG_PROMOTED_UNSIGNED_P gives the signedness of the
   extension.

   When used as a LHS, it means that this extension must be done
   when assigning to SUBREG_REG.  */

#define SUBREG_PROMOTED_VAR_P(RTX)					\
  (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED", (RTX), SUBREG)->in_struct)

/* Valid for subregs which are SUBREG_PROMOTED_VAR_P().  In that case
   this gives the necessary extensions:
   0  - signed (SRP_SIGNED)
   1  - normal unsigned (SRP_UNSIGNED)
   2  - value is both sign and unsigned extended for mode
	(SRP_SIGNED_AND_UNSIGNED).
   -1 - pointer unsigned, which most often can be handled like unsigned
        extension, except for generating instructions where we need to
	emit special code (ptr_extend insns) on some architectures
	(SRP_POINTER). */

const int SRP_POINTER = -1;
const int SRP_SIGNED = 0;
const int SRP_UNSIGNED = 1;
const int SRP_SIGNED_AND_UNSIGNED = 2;

/* Sets promoted mode for SUBREG_PROMOTED_VAR_P().  */
#define SUBREG_PROMOTED_SET(RTX, VAL)		                        \
do {								        \
  rtx const _rtx = RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SET",		\
                                    (RTX), SUBREG);			\
  switch (VAL)								\
  {									\
    case SRP_POINTER:							\
      _rtx->volatil = 0;						\
      _rtx->unchanging = 0;						\
      break;								\
    case SRP_SIGNED:							\
      _rtx->volatil = 0;						\
      _rtx->unchanging = 1;						\
      break;								\
    case SRP_UNSIGNED:							\
      _rtx->volatil = 1;						\
      _rtx->unchanging = 0;						\
      break;								\
    case SRP_SIGNED_AND_UNSIGNED:					\
      _rtx->volatil = 1;						\
      _rtx->unchanging = 1;						\
      break;								\
  }									\
} while (0)
/* Gets the value stored in promoted mode for SUBREG_PROMOTED_VAR_P(),
   including SRP_SIGNED_AND_UNSIGNED if promoted for
   both signed and unsigned.  */
#define SUBREG_PROMOTED_GET(RTX)	\
  (2 * (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_GET", (RTX), SUBREG)->volatil)\
   + (RTX)->unchanging - 1)

/* Returns sign of promoted mode for SUBREG_PROMOTED_VAR_P().  */
#define SUBREG_PROMOTED_SIGN(RTX)	\
  ((RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGN", (RTX), SUBREG)->volatil) ? 1\
   : (RTX)->unchanging - 1)

/* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted
   for SIGNED type.  */
#define SUBREG_PROMOTED_SIGNED_P(RTX)	\
  (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGNED_P", (RTX), SUBREG)->unchanging)

/* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted
   for UNSIGNED type.  */
#define SUBREG_PROMOTED_UNSIGNED_P(RTX)	\
  (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_UNSIGNED_P", (RTX), SUBREG)->volatil)

/* Checks if RTX of SUBREG_PROMOTED_VAR_P() is promoted for given SIGN.  */
#define SUBREG_CHECK_PROMOTED_SIGN(RTX, SIGN)	\
((SIGN) == SRP_POINTER ? SUBREG_PROMOTED_GET (RTX) == SRP_POINTER	\
 : (SIGN) == SRP_SIGNED ? SUBREG_PROMOTED_SIGNED_P (RTX)		\
 : SUBREG_PROMOTED_UNSIGNED_P (RTX))
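
/* Illustrative sketch, not part of this interface: a promoted subreg is
   typically flagged when it is created and queried later:

     SUBREG_PROMOTED_VAR_P (x) = 1;
     SUBREG_PROMOTED_SET (x, SRP_UNSIGNED);
     ...
     if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
       skip_the_redundant_zero_extension ();

   X is assumed to be a SUBREG rtx; the final call is a placeholder.  */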

/* True if the subreg was generated by LRA for reload insns.  Such
   subregs are valid only during LRA.  */
#define LRA_SUBREG_P(RTX)	\
  (RTL_FLAG_CHECK1 ("LRA_SUBREG_P", (RTX), SUBREG)->jump)

/* Access various components of an ASM_OPERANDS rtx.  */

#define ASM_OPERANDS_TEMPLATE(RTX) XCSTR (RTX, 0, ASM_OPERANDS)
#define ASM_OPERANDS_OUTPUT_CONSTRAINT(RTX) XCSTR (RTX, 1, ASM_OPERANDS)
#define ASM_OPERANDS_OUTPUT_IDX(RTX) XCINT (RTX, 2, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_VEC(RTX) XCVEC (RTX, 3, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_CONSTRAINT_VEC(RTX) XCVEC (RTX, 4, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT(RTX, N) XCVECEXP (RTX, 3, N, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_LENGTH(RTX) XCVECLEN (RTX, 3, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_CONSTRAINT_EXP(RTX, N) \
  XCVECEXP (RTX, 4, N, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_CONSTRAINT(RTX, N) \
  XSTR (XCVECEXP (RTX, 4, N, ASM_OPERANDS), 0)
#define ASM_OPERANDS_INPUT_MODE(RTX, N)  \
  GET_MODE (XCVECEXP (RTX, 4, N, ASM_OPERANDS))
#define ASM_OPERANDS_LABEL_VEC(RTX) XCVEC (RTX, 5, ASM_OPERANDS)
#define ASM_OPERANDS_LABEL_LENGTH(RTX) XCVECLEN (RTX, 5, ASM_OPERANDS)
#define ASM_OPERANDS_LABEL(RTX, N) XCVECEXP (RTX, 5, N, ASM_OPERANDS)
#define ASM_OPERANDS_SOURCE_LOCATION(RTX) XCUINT (RTX, 6, ASM_OPERANDS)
#define ASM_INPUT_SOURCE_LOCATION(RTX) XCUINT (RTX, 1, ASM_INPUT)
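
/* Illustrative sketch, not part of this interface: the input operands of an
   ASM_OPERANDS rtx X can be walked with the accessors above:

     for (int i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
       {
	 rtx input = ASM_OPERANDS_INPUT (x, i);
	 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (x, i);
	 examine_operand (input, constraint);
       }

   examine_operand is a hypothetical consumer of each operand/constraint
   pair.  */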

/* 1 if RTX is a mem that is statically allocated in read-only memory.  */
#define MEM_READONLY_P(RTX) \
  (RTL_FLAG_CHECK1 ("MEM_READONLY_P", (RTX), MEM)->unchanging)

/* 1 if RTX is a mem and we should keep the alias set for this mem
   unchanged when we access a component.  Set to 1, for example, when we
   are already in a non-addressable component of an aggregate.  */
#define MEM_KEEP_ALIAS_SET_P(RTX)					\
  (RTL_FLAG_CHECK1 ("MEM_KEEP_ALIAS_SET_P", (RTX), MEM)->jump)

/* 1 if RTX is a mem or asm_operand for a volatile reference.  */
#define MEM_VOLATILE_P(RTX)						\
  (RTL_FLAG_CHECK3 ("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS,		\
		    ASM_INPUT)->volatil)

/* 1 if RTX is a mem that cannot trap.  */
#define MEM_NOTRAP_P(RTX) \
  (RTL_FLAG_CHECK1 ("MEM_NOTRAP_P", (RTX), MEM)->call)

/* The memory attribute block.  We provide access macros for each value
   in the block and provide defaults if none specified.  */
#define MEM_ATTRS(RTX) X0MEMATTR (RTX, 1)

/* The register attribute block.  We provide access macros for each value
   in the block and provide defaults if none specified.  */
#define REG_ATTRS(RTX) X0REGATTR (RTX, 1)

#ifndef GENERATOR_FILE
/* For a MEM rtx, the alias set.  If 0, this MEM is not in any alias
   set, and may alias anything.  Otherwise, the MEM can only alias
   MEMs in a conflicting alias set.  This value is set in a
   language-dependent manner in the front-end, and should not be
   altered in the back-end.  These set numbers are tested with
   alias_sets_conflict_p.  */
#define MEM_ALIAS_SET(RTX) (get_mem_attrs (RTX)->alias)

/* For a MEM rtx, the decl it is known to refer to, if it is known to
   refer to part of a DECL.  It may also be a COMPONENT_REF.  */
#define MEM_EXPR(RTX) (get_mem_attrs (RTX)->expr)

/* For a MEM rtx, true if its MEM_OFFSET is known.  */
#define MEM_OFFSET_KNOWN_P(RTX) (get_mem_attrs (RTX)->offset_known_p)

/* For a MEM rtx, the offset from the start of MEM_EXPR.  */
#define MEM_OFFSET(RTX) (get_mem_attrs (RTX)->offset)

/* For a MEM rtx, the address space.  */
#define MEM_ADDR_SPACE(RTX) (get_mem_attrs (RTX)->addrspace)

/* For a MEM rtx, true if its MEM_SIZE is known.  */
#define MEM_SIZE_KNOWN_P(RTX) (get_mem_attrs (RTX)->size_known_p)

/* For a MEM rtx, the size in bytes of the MEM.  */
#define MEM_SIZE(RTX) (get_mem_attrs (RTX)->size)

/* For a MEM rtx, the alignment in bits.  We can use the alignment of the
   mode as a default when STRICT_ALIGNMENT is nonzero, but not otherwise.  */
#define MEM_ALIGN(RTX) (get_mem_attrs (RTX)->align)
#else
#define MEM_ADDR_SPACE(RTX) ADDR_SPACE_GENERIC
#endif
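
/* Illustrative sketch, not part of this interface: MEM_SIZE and MEM_OFFSET
   are only meaningful when the corresponding *_KNOWN_P predicate is true,
   so typical code guards the access:

     if (MEM_SIZE_KNOWN_P (mem) && MEM_OFFSET_KNOWN_P (mem))
       {
	 HOST_WIDE_INT size = MEM_SIZE (mem);
	 HOST_WIDE_INT offset = MEM_OFFSET (mem);
	 record_access (offset, size);
       }

   MEM is assumed to be a MEM rtx and record_access a hypothetical helper.  */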

/* For a REG rtx, the decl it is known to refer to, if it is known to
   refer to part of a DECL.  */
#define REG_EXPR(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->decl)

/* For a REG rtx, the offset from the start of REG_EXPR, if known, as an
   HOST_WIDE_INT.  */
#define REG_OFFSET(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->offset)

/* Copy the attributes that apply to memory locations from RHS to LHS.  */
#define MEM_COPY_ATTRIBUTES(LHS, RHS)				\
  (MEM_VOLATILE_P (LHS) = MEM_VOLATILE_P (RHS),			\
   MEM_NOTRAP_P (LHS) = MEM_NOTRAP_P (RHS),			\
   MEM_READONLY_P (LHS) = MEM_READONLY_P (RHS),			\
   MEM_KEEP_ALIAS_SET_P (LHS) = MEM_KEEP_ALIAS_SET_P (RHS),	\
   MEM_POINTER (LHS) = MEM_POINTER (RHS),			\
   MEM_ATTRS (LHS) = MEM_ATTRS (RHS))

/* 1 if RTX is a label_ref for a nonlocal label.  */
/* Likewise in an expr_list for a REG_LABEL_OPERAND or
   REG_LABEL_TARGET note.  */
#define LABEL_REF_NONLOCAL_P(RTX)					\
  (RTL_FLAG_CHECK1 ("LABEL_REF_NONLOCAL_P", (RTX), LABEL_REF)->volatil)

/* 1 if RTX is a code_label that should always be considered to be needed.  */
#define LABEL_PRESERVE_P(RTX)						\
  (RTL_FLAG_CHECK2 ("LABEL_PRESERVE_P", (RTX), CODE_LABEL, NOTE)->in_struct)

/* During sched, 1 if RTX is an insn that must be scheduled together
   with the preceding insn.  */
#define SCHED_GROUP_P(RTX)						\
  (RTL_FLAG_CHECK4 ("SCHED_GROUP_P", (RTX), DEBUG_INSN, INSN,		\
		    JUMP_INSN, CALL_INSN)->in_struct)

/* For a SET rtx, SET_DEST is the place that is set
   and SET_SRC is the value it is set to.  */
#define SET_DEST(RTX) XC2EXP (RTX, 0, SET, CLOBBER)
#define SET_SRC(RTX) XCEXP (RTX, 1, SET)
#define SET_IS_RETURN_P(RTX)						\
  (RTL_FLAG_CHECK1 ("SET_IS_RETURN_P", (RTX), SET)->jump)

/* For a TRAP_IF rtx, TRAP_CONDITION is an expression.  */
#define TRAP_CONDITION(RTX) XCEXP (RTX, 0, TRAP_IF)
#define TRAP_CODE(RTX) XCEXP (RTX, 1, TRAP_IF)

/* For a COND_EXEC rtx, COND_EXEC_TEST is the condition to base
   conditionally executing the code on, COND_EXEC_CODE is the code
   to execute if the condition is true.  */
#define COND_EXEC_TEST(RTX) XCEXP (RTX, 0, COND_EXEC)
#define COND_EXEC_CODE(RTX) XCEXP (RTX, 1, COND_EXEC)

/* 1 if RTX is a symbol_ref that addresses this function's rtl
   constants pool.  */
#define CONSTANT_POOL_ADDRESS_P(RTX)					\
  (RTL_FLAG_CHECK1 ("CONSTANT_POOL_ADDRESS_P", (RTX), SYMBOL_REF)->unchanging)

/* 1 if RTX is a symbol_ref that addresses a value in the file's
   tree constant pool.  This information is private to varasm.c.  */
#define TREE_CONSTANT_POOL_ADDRESS_P(RTX)				\
  (RTL_FLAG_CHECK1 ("TREE_CONSTANT_POOL_ADDRESS_P",			\
		    (RTX), SYMBOL_REF)->frame_related)

/* Used if RTX is a symbol_ref, for machine-specific purposes.  */
#define SYMBOL_REF_FLAG(RTX)						\
  (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAG", (RTX), SYMBOL_REF)->volatil)

/* 1 if RTX is a symbol_ref that has been the library function in
   emit_library_call.  */
#define SYMBOL_REF_USED(RTX)						\
  (RTL_FLAG_CHECK1 ("SYMBOL_REF_USED", (RTX), SYMBOL_REF)->used)

/* 1 if RTX is a symbol_ref for a weak symbol.  */
#define SYMBOL_REF_WEAK(RTX)						\
  (RTL_FLAG_CHECK1 ("SYMBOL_REF_WEAK", (RTX), SYMBOL_REF)->return_val)

/* A pointer attached to the SYMBOL_REF; either SYMBOL_REF_DECL or
   SYMBOL_REF_CONSTANT.  */
#define SYMBOL_REF_DATA(RTX) X0ANY ((RTX), 1)

/* Set RTX's SYMBOL_REF_DECL to DECL.  RTX must not be a constant
   pool symbol.  */
#define SET_SYMBOL_REF_DECL(RTX, DECL) \
  (gcc_assert (!CONSTANT_POOL_ADDRESS_P (RTX)), X0TREE ((RTX), 1) = (DECL))

/* The tree (decl or constant) associated with the symbol, or null.  */
#define SYMBOL_REF_DECL(RTX) \
  (CONSTANT_POOL_ADDRESS_P (RTX) ? NULL : X0TREE ((RTX), 1))

/* Set RTX's SYMBOL_REF_CONSTANT to C.  RTX must be a constant pool symbol.  */
#define SET_SYMBOL_REF_CONSTANT(RTX, C) \
  (gcc_assert (CONSTANT_POOL_ADDRESS_P (RTX)), X0CONSTANT ((RTX), 1) = (C))

/* The rtx constant pool entry for a symbol, or null.  */
#define SYMBOL_REF_CONSTANT(RTX) \
  (CONSTANT_POOL_ADDRESS_P (RTX) ? X0CONSTANT ((RTX), 1) : NULL)

/* A set of flags on a symbol_ref that are, in some respects, redundant with
   information derivable from the tree decl associated with this symbol.
   Except that we build a *lot* of SYMBOL_REFs that aren't associated with a
   decl.  In some cases this is a bug.  But beyond that, it's nice to cache
   this information to avoid recomputing it.  Finally, this allows space for
   the target to store more than one bit of information, as with
   SYMBOL_REF_FLAG.  */
#define SYMBOL_REF_FLAGS(RTX) \
  (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAGS", (RTX), SYMBOL_REF) \
   ->u2.symbol_ref_flags)

/* These flags are common enough to be defined for all targets.  They
   are computed by the default version of targetm.encode_section_info.  */

/* Set if this symbol is a function.  */
#define SYMBOL_FLAG_FUNCTION	(1 << 0)
#define SYMBOL_REF_FUNCTION_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_FUNCTION) != 0)
/* Set if targetm.binds_local_p is true.  */
#define SYMBOL_FLAG_LOCAL	(1 << 1)
#define SYMBOL_REF_LOCAL_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_LOCAL) != 0)
/* Set if targetm.in_small_data_p is true.  */
#define SYMBOL_FLAG_SMALL	(1 << 2)
#define SYMBOL_REF_SMALL_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_SMALL) != 0)
/* The three-bit field at [5:3] is true for TLS variables; use
   SYMBOL_REF_TLS_MODEL to extract the field as an enum tls_model.  */
#define SYMBOL_FLAG_TLS_SHIFT	3
#define SYMBOL_REF_TLS_MODEL(RTX) \
  ((enum tls_model) ((SYMBOL_REF_FLAGS (RTX) >> SYMBOL_FLAG_TLS_SHIFT) & 7))
/* Set if this symbol is not defined in this translation unit.  */
#define SYMBOL_FLAG_EXTERNAL	(1 << 6)
#define SYMBOL_REF_EXTERNAL_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_EXTERNAL) != 0)
/* Set if this symbol has a block_symbol structure associated with it.  */
#define SYMBOL_FLAG_HAS_BLOCK_INFO (1 << 7)
#define SYMBOL_REF_HAS_BLOCK_INFO_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_HAS_BLOCK_INFO) != 0)
/* Set if this symbol is a section anchor.  SYMBOL_REF_ANCHOR_P implies
   SYMBOL_REF_HAS_BLOCK_INFO_P.  */
#define SYMBOL_FLAG_ANCHOR	(1 << 8)
#define SYMBOL_REF_ANCHOR_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_ANCHOR) != 0)

/* Subsequent bits are available for the target to use.  */
#define SYMBOL_FLAG_MACH_DEP_SHIFT	9
#define SYMBOL_FLAG_MACH_DEP		(1 << SYMBOL_FLAG_MACH_DEP_SHIFT)
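
/* Illustrative sketch, not part of this interface: the predicates above are
   read-only wrappers around SYMBOL_REF_FLAGS, e.g.:

     if (SYMBOL_REF_FUNCTION_P (sym) && SYMBOL_REF_LOCAL_P (sym))
       use_short_call_sequence ();
     if (SYMBOL_REF_TLS_MODEL (sym) != TLS_MODEL_NONE)
       use_tls_access_sequence ();

   SYM is assumed to be a SYMBOL_REF rtx; the calls are placeholders, and
   targets may also test their own bits at or above SYMBOL_FLAG_MACH_DEP.  */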

/* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the object_block
   structure to which the symbol belongs, or NULL if it has not been
   assigned a block.  */
#define SYMBOL_REF_BLOCK(RTX) (BLOCK_SYMBOL_CHECK (RTX)->block)

/* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the offset of RTX from
   the first object in SYMBOL_REF_BLOCK (RTX).  The value is negative if
   RTX has not yet been assigned to a block, or it has not been given an
   offset within that block.  */
#define SYMBOL_REF_BLOCK_OFFSET(RTX) (BLOCK_SYMBOL_CHECK (RTX)->offset)

/* True if RTX is flagged to be a scheduling barrier.  */
#define PREFETCH_SCHEDULE_BARRIER_P(RTX)					\
  (RTL_FLAG_CHECK1 ("PREFETCH_SCHEDULE_BARRIER_P", (RTX), PREFETCH)->volatil)

/* Indicate whether the machine has any sort of auto increment addressing.
   If not, we can avoid checking for REG_INC notes.  */

#if (defined (HAVE_PRE_INCREMENT) || defined (HAVE_PRE_DECREMENT) \
     || defined (HAVE_POST_INCREMENT) || defined (HAVE_POST_DECREMENT) \
     || defined (HAVE_PRE_MODIFY_DISP) || defined (HAVE_POST_MODIFY_DISP) \
     || defined (HAVE_PRE_MODIFY_REG) || defined (HAVE_POST_MODIFY_REG))
#define AUTO_INC_DEC
#endif

/* Define a macro to look for REG_INC notes,
   but save time on machines where they never exist.  */

#ifdef AUTO_INC_DEC
#define FIND_REG_INC_NOTE(INSN, REG)			\
  ((REG) != NULL_RTX && REG_P ((REG))			\
   ? find_regno_note ((INSN), REG_INC, REGNO (REG))	\
   : find_reg_note ((INSN), REG_INC, (REG)))
#else
#define FIND_REG_INC_NOTE(INSN, REG) 0
#endif

#ifndef HAVE_PRE_INCREMENT
#define HAVE_PRE_INCREMENT 0
#endif

#ifndef HAVE_PRE_DECREMENT
#define HAVE_PRE_DECREMENT 0
#endif

#ifndef HAVE_POST_INCREMENT
#define HAVE_POST_INCREMENT 0
#endif

#ifndef HAVE_POST_DECREMENT
#define HAVE_POST_DECREMENT 0
#endif

#ifndef HAVE_POST_MODIFY_DISP
#define HAVE_POST_MODIFY_DISP 0
#endif

#ifndef HAVE_POST_MODIFY_REG
#define HAVE_POST_MODIFY_REG 0
#endif

#ifndef HAVE_PRE_MODIFY_DISP
#define HAVE_PRE_MODIFY_DISP 0
#endif

#ifndef HAVE_PRE_MODIFY_REG
#define HAVE_PRE_MODIFY_REG 0
#endif


/* Some architectures do not have complete pre/post increment/decrement
   instruction sets, or only move some modes efficiently.  These macros
   allow us to tune autoincrement generation.  */

#ifndef USE_LOAD_POST_INCREMENT
#define USE_LOAD_POST_INCREMENT(MODE)   HAVE_POST_INCREMENT
#endif

#ifndef USE_LOAD_POST_DECREMENT
#define USE_LOAD_POST_DECREMENT(MODE)   HAVE_POST_DECREMENT
#endif

#ifndef USE_LOAD_PRE_INCREMENT
#define USE_LOAD_PRE_INCREMENT(MODE)    HAVE_PRE_INCREMENT
#endif

#ifndef USE_LOAD_PRE_DECREMENT
#define USE_LOAD_PRE_DECREMENT(MODE)    HAVE_PRE_DECREMENT
#endif

#ifndef USE_STORE_POST_INCREMENT
#define USE_STORE_POST_INCREMENT(MODE)  HAVE_POST_INCREMENT
#endif

#ifndef USE_STORE_POST_DECREMENT
#define USE_STORE_POST_DECREMENT(MODE)  HAVE_POST_DECREMENT
#endif

#ifndef USE_STORE_PRE_INCREMENT
#define USE_STORE_PRE_INCREMENT(MODE)   HAVE_PRE_INCREMENT
#endif

#ifndef USE_STORE_PRE_DECREMENT
#define USE_STORE_PRE_DECREMENT(MODE)   HAVE_PRE_DECREMENT
#endif

/* Nonzero when we are generating CONCATs.  */
extern int generating_concat_p;

/* Nonzero when we are expanding trees to RTL.  */
extern int currently_expanding_to_rtl;

/* Generally useful functions.  */

/* In explow.c */
extern HOST_WIDE_INT trunc_int_for_mode	(HOST_WIDE_INT, enum machine_mode);
extern rtx plus_constant (enum machine_mode, rtx, HOST_WIDE_INT, bool = false);

/* In rtl.c */
extern rtx rtx_alloc_stat (RTX_CODE MEM_STAT_DECL);
#define rtx_alloc(c) rtx_alloc_stat (c MEM_STAT_INFO)
extern rtx rtx_alloc_stat_v (RTX_CODE MEM_STAT_DECL, int);
#define rtx_alloc_v(c, SZ) rtx_alloc_stat_v (c MEM_STAT_INFO, SZ)
#define const_wide_int_alloc(NWORDS)				\
  rtx_alloc_v (CONST_WIDE_INT,					\
	       (sizeof (struct hwivec_def)			\
		+ ((NWORDS)-1) * sizeof (HOST_WIDE_INT)))	\

extern rtvec rtvec_alloc (int);
extern rtvec shallow_copy_rtvec (rtvec);
extern bool shared_const_p (const_rtx);
extern rtx copy_rtx (rtx);
extern void dump_rtx_statistics (void);

/* In emit-rtl.c */
extern rtx copy_rtx_if_shared (rtx);

/* In rtl.c */
extern unsigned int rtx_size (const_rtx);
extern rtx shallow_copy_rtx_stat (const_rtx MEM_STAT_DECL);
#define shallow_copy_rtx(a) shallow_copy_rtx_stat (a MEM_STAT_INFO)
extern int rtx_equal_p (const_rtx, const_rtx);

/* In emit-rtl.c */
extern rtvec gen_rtvec_v (int, rtx *);
extern rtvec gen_rtvec_v (int, rtx_insn **);
extern rtx gen_reg_rtx (enum machine_mode);
extern rtx gen_rtx_REG_offset (rtx, enum machine_mode, unsigned int, int);
extern rtx gen_reg_rtx_offset (rtx, enum machine_mode, int);
extern rtx gen_reg_rtx_and_attrs (rtx);
extern rtx_code_label *gen_label_rtx (void);
extern rtx gen_lowpart_common (enum machine_mode, rtx);

/* In cse.c */
extern rtx gen_lowpart_if_possible (enum machine_mode, rtx);

/* In emit-rtl.c */
extern rtx gen_highpart (enum machine_mode, rtx);
extern rtx gen_highpart_mode (enum machine_mode, enum machine_mode, rtx);
extern rtx operand_subword (rtx, unsigned int, int, enum machine_mode);

/* In emit-rtl.c */
extern rtx operand_subword_force (rtx, unsigned int, enum machine_mode);
extern bool paradoxical_subreg_p (const_rtx);
extern int subreg_lowpart_p (const_rtx);
extern unsigned int subreg_lowpart_offset (enum machine_mode,
					   enum machine_mode);
extern unsigned int subreg_highpart_offset (enum machine_mode,
					    enum machine_mode);
extern int byte_lowpart_offset (enum machine_mode, enum machine_mode);
extern rtx make_safe_from (rtx, rtx);
extern rtx convert_memory_address_addr_space (enum machine_mode, rtx,
					      addr_space_t);
#define convert_memory_address(to_mode,x) \
	convert_memory_address_addr_space ((to_mode), (x), ADDR_SPACE_GENERIC)
extern const char *get_insn_name (int);
extern rtx_insn *get_last_insn_anywhere (void);
extern rtx_insn *get_first_nonnote_insn (void);
extern rtx_insn *get_last_nonnote_insn (void);
extern void start_sequence (void);
extern void push_to_sequence (rtx_insn *);
extern void push_to_sequence2 (rtx_insn *, rtx_insn *);
extern void end_sequence (void);
#if TARGET_SUPPORTS_WIDE_INT == 0
extern double_int rtx_to_double_int (const_rtx);
#endif
extern void cwi_output_hex (FILE *, const_rtx);
#ifndef GENERATOR_FILE
extern rtx immed_wide_int_const (const wide_int_ref &, enum machine_mode);
#endif
#if TARGET_SUPPORTS_WIDE_INT == 0
extern rtx immed_double_const (HOST_WIDE_INT, HOST_WIDE_INT,
			       enum machine_mode);
#endif

/* In loop-iv.c  */

extern rtx lowpart_subreg (enum machine_mode, rtx, enum machine_mode);

/* In varasm.c  */
extern rtx force_const_mem (enum machine_mode, rtx);

/* In varasm.c  */

struct function;
extern rtx get_pool_constant (const_rtx);
extern rtx get_pool_constant_mark (rtx, bool *);
extern enum machine_mode get_pool_mode (const_rtx);
extern rtx simplify_subtraction (rtx);
extern void decide_function_section (tree);

/* In function.c  */
extern rtx assign_stack_local (enum machine_mode, HOST_WIDE_INT, int);
#define ASLK_REDUCE_ALIGN 1
#define ASLK_RECORD_PAD 2
extern rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int, int);
extern rtx assign_stack_temp (enum machine_mode, HOST_WIDE_INT);
extern rtx assign_stack_temp_for_type (enum machine_mode, HOST_WIDE_INT, tree);
extern rtx assign_temp (tree, int, int);

/* In emit-rtl.c */
extern rtx_insn *emit_insn_before (rtx, rtx);
extern rtx_insn *emit_insn_before_noloc (rtx, rtx_insn *, basic_block);
extern rtx_insn *emit_insn_before_setloc (rtx, rtx_insn *, int);
extern rtx_insn *emit_jump_insn_before (rtx, rtx);
extern rtx_insn *emit_jump_insn_before_noloc (rtx, rtx_insn *);
extern rtx_insn *emit_jump_insn_before_setloc (rtx, rtx_insn *, int);
extern rtx_insn *emit_call_insn_before (rtx, rtx_insn *);
extern rtx_insn *emit_call_insn_before_noloc (rtx, rtx_insn *);
extern rtx_insn *emit_call_insn_before_setloc (rtx, rtx_insn *, int);
extern rtx_insn *emit_debug_insn_before (rtx, rtx);
extern rtx_insn *emit_debug_insn_before_noloc (rtx, rtx);
extern rtx_insn *emit_debug_insn_before_setloc (rtx, rtx, int);
extern rtx_barrier *emit_barrier_before (rtx);
extern rtx_insn *emit_label_before (rtx, rtx_insn *);
extern rtx_note *emit_note_before (enum insn_note, rtx);
extern rtx_insn *emit_insn_after (rtx, rtx);
extern rtx_insn *emit_insn_after_noloc (rtx, rtx, basic_block);
extern rtx_insn *emit_insn_after_setloc (rtx, rtx, int);
extern rtx_insn *emit_jump_insn_after (rtx, rtx);
extern rtx_insn *emit_jump_insn_after_noloc (rtx, rtx);
extern rtx_insn *emit_jump_insn_after_setloc (rtx, rtx, int);
extern rtx_insn *emit_call_insn_after (rtx, rtx);
extern rtx_insn *emit_call_insn_after_noloc (rtx, rtx);
extern rtx_insn *emit_call_insn_after_setloc (rtx, rtx, int);
extern rtx_insn *emit_debug_insn_after (rtx, rtx);
extern rtx_insn *emit_debug_insn_after_noloc (rtx, rtx);
extern rtx_insn *emit_debug_insn_after_setloc (rtx, rtx, int);
extern rtx_barrier *emit_barrier_after (rtx);
extern rtx_insn *emit_label_after (rtx, rtx_insn *);
extern rtx_note *emit_note_after (enum insn_note, rtx);
extern rtx_insn *emit_insn (rtx);
extern rtx_insn *emit_debug_insn (rtx);
extern rtx_insn *emit_jump_insn (rtx);
extern rtx_insn *emit_call_insn (rtx);
extern rtx_insn *emit_label (rtx);
extern rtx_jump_table_data *emit_jump_table_data (rtx);
extern rtx_barrier *emit_barrier (void);
extern rtx_note *emit_note (enum insn_note);
extern rtx_note *emit_note_copy (rtx_note *);
extern rtx_insn *gen_clobber (rtx);
extern rtx_insn *emit_clobber (rtx);
extern rtx_insn *gen_use (rtx);
extern rtx_insn *emit_use (rtx);
extern rtx_insn *make_insn_raw (rtx);
extern void add_function_usage_to (rtx, rtx);
extern rtx_call_insn *last_call_insn (void);
extern rtx_insn *previous_insn (rtx_insn *);
extern rtx_insn *next_insn (rtx_insn *);
extern rtx_insn *prev_nonnote_insn (rtx);
extern rtx_insn *prev_nonnote_insn_bb (rtx);
extern rtx_insn *next_nonnote_insn (rtx);
extern rtx_insn *next_nonnote_insn_bb (rtx_insn *);
extern rtx_insn *prev_nondebug_insn (rtx);
extern rtx_insn *next_nondebug_insn (rtx);
extern rtx_insn *prev_nonnote_nondebug_insn (rtx);
extern rtx_insn *next_nonnote_nondebug_insn (rtx);
extern rtx_insn *prev_real_insn (rtx);
extern rtx_insn *next_real_insn (rtx);
extern rtx_insn *prev_active_insn (rtx);
extern rtx_insn *next_active_insn (rtx);
extern int active_insn_p (const_rtx);
extern rtx_insn *next_cc0_user (rtx);
extern rtx_insn *prev_cc0_setter (rtx);

/* In emit-rtl.c  */
extern int insn_line (const rtx_insn *);
extern const char * insn_file (const rtx_insn *);
extern tree insn_scope (const rtx_insn *);
extern expanded_location insn_location (const rtx_insn *);
extern location_t prologue_location, epilogue_location;

/* In jump.c */
extern enum rtx_code reverse_condition (enum rtx_code);
extern enum rtx_code reverse_condition_maybe_unordered (enum rtx_code);
extern enum rtx_code swap_condition (enum rtx_code);
extern enum rtx_code unsigned_condition (enum rtx_code);
extern enum rtx_code signed_condition (enum rtx_code);
extern void mark_jump_label (rtx, rtx_insn *, int);

/* In jump.c */
extern rtx_insn *delete_related_insns (rtx);

/* In recog.c  */
extern rtx *find_constant_term_loc (rtx *);

/* In emit-rtl.c  */
extern rtx_insn *try_split (rtx, rtx, int);
extern int split_branch_probability;

/* In unknown file  */
extern rtx split_insns (rtx, rtx);

/* In simplify-rtx.c  */
extern rtx simplify_const_unary_operation (enum rtx_code, enum machine_mode,
					   rtx, enum machine_mode);
extern rtx simplify_unary_operation (enum rtx_code, enum machine_mode, rtx,
				     enum machine_mode);
extern rtx simplify_const_binary_operation (enum rtx_code, enum machine_mode,
					    rtx, rtx);
extern rtx simplify_binary_operation (enum rtx_code, enum machine_mode, rtx,
				      rtx);
extern rtx simplify_ternary_operation (enum rtx_code, enum machine_mode,
				       enum machine_mode, rtx, rtx, rtx);
extern rtx simplify_const_relational_operation (enum rtx_code,
						enum machine_mode, rtx, rtx);
extern rtx simplify_relational_operation (enum rtx_code, enum machine_mode,
					  enum machine_mode, rtx, rtx);
extern rtx simplify_gen_binary (enum rtx_code, enum machine_mode, rtx, rtx);
extern rtx simplify_gen_unary (enum rtx_code, enum machine_mode, rtx,
			       enum machine_mode);
extern rtx simplify_gen_ternary (enum rtx_code, enum machine_mode,
				 enum machine_mode, rtx, rtx, rtx);
extern rtx simplify_gen_relational (enum rtx_code, enum machine_mode,
				    enum machine_mode, rtx, rtx);
extern rtx simplify_subreg (enum machine_mode, rtx, enum machine_mode,
			    unsigned int);
extern rtx simplify_gen_subreg (enum machine_mode, rtx, enum machine_mode,
				unsigned int);
2772
extern rtx simplify_replace_fn_rtx (rtx, const_rtx,
2773
				    rtx (*fn) (rtx, const_rtx, void *), void *);
2774
extern rtx simplify_replace_rtx (rtx, const_rtx, rtx);
2775
extern rtx simplify_rtx (const_rtx);
2776
extern rtx avoid_constant_pool_reference (rtx);
2777
extern rtx delegitimize_mem_from_attrs (rtx);
2778
extern bool mode_signbit_p (enum machine_mode, const_rtx);
2779 2780 2781 2782 2783
extern bool val_signbit_p (enum machine_mode, unsigned HOST_WIDE_INT);
extern bool val_signbit_known_set_p (enum machine_mode,
				     unsigned HOST_WIDE_INT);
extern bool val_signbit_known_clear_p (enum machine_mode,
				       unsigned HOST_WIDE_INT);
2784

2785
/* In reginfo.c  */
2786 2787
extern enum machine_mode choose_hard_reg_mode (unsigned int, unsigned int,
					       bool);
#ifdef HARD_CONST
extern const HARD_REG_SET &simplifiable_subregs (const subreg_shape &);
#endif

/* In emit-rtl.c  */
extern rtx set_for_reg_notes (rtx);
extern rtx set_unique_reg_note (rtx, enum reg_note, rtx);
extern rtx set_dst_reg_note (rtx, enum reg_note, rtx, rtx);
extern void set_insn_deleted (rtx);

/* Functions in rtlanal.c */

extern rtx single_set_2 (const rtx_insn *, const_rtx);

/* Handle the cheap and common cases inline for performance.  */

inline rtx single_set (const rtx_insn *insn)
{
  if (!INSN_P (insn))
    return NULL_RTX;

  if (GET_CODE (PATTERN (insn)) == SET)
    return PATTERN (insn);

  /* Defer to the more expensive case.  */
  return single_set_2 (insn, PATTERN (insn));
}
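
/* A usage sketch, not part of the interface: callers typically pull the
   single SET out of a simple insn and then inspect its two sides, e.g.

       rtx set = single_set (insn);
       if (set != NULL_RTX)
         {
           rtx dest = SET_DEST (set);
           rtx src = SET_SRC (set);
           ...
         }

   where INSN is whatever rtx_insn * the caller already has, and SET_DEST
   and SET_SRC are the usual accessors for a SET rtx.  */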

extern enum machine_mode get_address_mode (rtx mem);
extern int rtx_addr_can_trap_p (const_rtx);
extern bool nonzero_address_p (const_rtx);
extern int rtx_unstable_p (const_rtx);
extern bool rtx_varies_p (const_rtx, bool);
extern bool rtx_addr_varies_p (const_rtx, bool);
extern rtx get_call_rtx_from (rtx);
extern HOST_WIDE_INT get_integer_term (const_rtx);
extern rtx get_related_value (const_rtx);
extern bool offset_within_block_p (const_rtx, HOST_WIDE_INT);
extern void split_const (rtx, rtx *, rtx *);
extern bool unsigned_reg_p (rtx);
extern int reg_mentioned_p (const_rtx, const_rtx);
extern int count_occurrences (const_rtx, const_rtx, int);
extern int reg_referenced_p (const_rtx, const_rtx);
extern int reg_used_between_p (const_rtx, const rtx_insn *, const rtx_insn *);
extern int reg_set_between_p (const_rtx, const rtx_insn *, const rtx_insn *);
extern int commutative_operand_precedence (rtx);
extern bool swap_commutative_operands_p (rtx, rtx);
extern int modified_between_p (const_rtx, const rtx_insn *, const rtx_insn *);
extern int no_labels_between_p (const rtx_insn *, const rtx_insn *);
extern int modified_in_p (const_rtx, const_rtx);
extern int reg_set_p (const_rtx, const_rtx);
extern int multiple_sets (const_rtx);
extern int set_noop_p (const_rtx);
extern int noop_move_p (const_rtx);
extern int refers_to_regno_p (unsigned int, unsigned int, const_rtx, rtx *);
extern int reg_overlap_mentioned_p (const_rtx, const_rtx);
extern const_rtx set_of (const_rtx, const_rtx);
extern void record_hard_reg_sets (rtx, const_rtx, void *);
extern void record_hard_reg_uses (rtx *, void *);
#ifdef HARD_CONST
extern void find_all_hard_regs (const_rtx, HARD_REG_SET *);
extern void find_all_hard_reg_sets (const_rtx, HARD_REG_SET *, bool);
#endif
extern void note_stores (const_rtx, void (*) (rtx, const_rtx, void *), void *);
extern void note_uses (rtx *, void (*) (rtx *, void *), void *);
extern int dead_or_set_p (const_rtx, const_rtx);
extern int dead_or_set_regno_p (const_rtx, unsigned int);
extern rtx find_reg_note (const_rtx, enum reg_note, const_rtx);
extern rtx find_regno_note (const_rtx, enum reg_note, unsigned int);
extern rtx find_reg_equal_equiv_note (const_rtx);
extern rtx find_constant_src (const rtx_insn *);
extern int find_reg_fusage (const_rtx, enum rtx_code, const_rtx);
extern int find_regno_fusage (const_rtx, enum rtx_code, unsigned int);
extern rtx alloc_reg_note (enum reg_note, rtx, rtx);
extern void add_reg_note (rtx, enum reg_note, rtx);
extern void add_int_reg_note (rtx, enum reg_note, int);
extern void add_shallow_copy_of_reg_note (rtx, rtx);
extern void remove_note (rtx, const_rtx);
extern void remove_reg_equal_equiv_notes (rtx);
extern void remove_reg_equal_equiv_notes_for_regno (unsigned int);
extern int side_effects_p (const_rtx);
extern int volatile_refs_p (const_rtx);
extern int volatile_insn_p (const_rtx);
extern int may_trap_p_1 (const_rtx, unsigned);
extern int may_trap_p (const_rtx);
extern int may_trap_or_fault_p (const_rtx);
extern bool can_throw_internal (const_rtx);
extern bool can_throw_external (const_rtx);
extern bool insn_could_throw_p (const_rtx);
extern bool insn_nothrow_p (const_rtx);
extern bool can_nonlocal_goto (const_rtx);
extern void copy_reg_eh_region_note_forward (rtx, rtx_insn *, rtx);
extern void copy_reg_eh_region_note_backward (rtx, rtx_insn *, rtx);
extern int inequality_comparisons_p (const_rtx);
extern rtx replace_rtx (rtx, rtx, rtx);
extern void replace_label (rtx *, rtx, rtx, bool);
extern void replace_label_in_insn (rtx_insn *, rtx, rtx, bool);
extern bool rtx_referenced_p (const_rtx, const_rtx);
extern bool tablejump_p (const rtx_insn *, rtx *, rtx_jump_table_data **);
extern int computed_jump_p (const_rtx);
extern bool tls_referenced_p (const_rtx);

typedef int (*rtx_function) (rtx *, void *);
extern int for_each_rtx (rtx *, rtx_function, void *);
extern int for_each_rtx_in_insn (rtx_insn **, rtx_function, void *);

/* Callback for for_each_inc_dec, to process the autoinc operation OP
   within MEM that sets DEST to SRC + SRCOFF, or SRC if SRCOFF is
   NULL.  The callback is passed the same opaque ARG passed to
   for_each_inc_dec.  Return zero to continue looking for other
   autoinc operations or any other value to interrupt the traversal and
   return that value to the caller of for_each_inc_dec.  */
typedef int (*for_each_inc_dec_fn) (rtx mem, rtx op, rtx dest, rtx src,
				    rtx srcoff, void *arg);
extern int for_each_inc_dec (rtx, for_each_inc_dec_fn, void *arg);
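
/* Illustrative sketch only (count_autoinc and its counter are invented
   for this example): a pass could count the auto-increment side effects
   in an insn pattern by returning zero from the callback so that the
   traversal continues:

       static int
       count_autoinc (rtx mem, rtx op, rtx dest, rtx src, rtx srcoff,
                      void *arg)
       {
         int *count = (int *) arg;
         ++*count;
         return 0;
       }

       int n = 0;
       for_each_inc_dec (PATTERN (insn), count_autoinc, &n);  */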

typedef int (*rtx_equal_p_callback_function) (const_rtx *, const_rtx *,
                                              rtx *, rtx *);
extern int rtx_equal_p_cb (const_rtx, const_rtx,
                           rtx_equal_p_callback_function);

typedef int (*hash_rtx_callback_function) (const_rtx, enum machine_mode, rtx *,
                                           enum machine_mode *);
extern unsigned hash_rtx_cb (const_rtx, enum machine_mode, int *, int *,
                             bool, hash_rtx_callback_function);

extern rtx regno_use_in (unsigned int, rtx);
extern int auto_inc_p (const_rtx);
extern int in_expr_list_p (const_rtx, const_rtx);
extern void remove_node_from_expr_list (const_rtx, rtx_expr_list **);
extern void remove_node_from_insn_list (const rtx_insn *, rtx_insn_list **);
extern int loc_mentioned_in_p (rtx *, const_rtx);
extern rtx_insn *find_first_parameter_load (rtx_insn *, rtx_insn *);
extern bool keep_with_call_p (const rtx_insn *);
extern bool label_is_jump_target_p (const_rtx, const rtx_insn *);
extern int insn_rtx_cost (rtx, bool);

/* Given an insn and condition, return a canonical description of
   the test being made.  */
extern rtx canonicalize_condition (rtx_insn *, rtx, int, rtx_insn **, rtx,
				   int, int);

/* Given a JUMP_INSN, return a canonical description of the test
   being made.  */
extern rtx get_condition (rtx_insn *, rtx_insn **, int, int);

/* Information about a subreg of a hard register.  */
struct subreg_info
{
  /* Offset of first hard register involved in the subreg.  */
  int offset;
  /* Number of hard registers involved in the subreg.  In the case of
     a paradoxical subreg, this is the number of registers that would
     be modified by writing to the subreg; some of them may be don't-care
     when reading from the subreg.  */
  int nregs;
  /* Whether this subreg can be represented as a hard reg with the new
     mode (by adding OFFSET to the original hard register).  */
  bool representable_p;
};

extern void subreg_get_info (unsigned int, enum machine_mode,
			     unsigned int, enum machine_mode,
			     struct subreg_info *);
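
/* A hedged example of the typical call pattern (X is assumed to be a
   SUBREG of a hard register; FINAL_REGNO is just a local here):

       struct subreg_info info;
       subreg_get_info (REGNO (SUBREG_REG (x)), GET_MODE (SUBREG_REG (x)),
                        SUBREG_BYTE (x), GET_MODE (x), &info);
       if (info.representable_p)
         final_regno = REGNO (SUBREG_REG (x)) + info.offset;

   REGNO, SUBREG_REG, SUBREG_BYTE and GET_MODE are the standard rtl
   accessors.  */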

/* lists.c */

extern void free_EXPR_LIST_list (rtx_expr_list **);
extern void free_INSN_LIST_list (rtx_insn_list **);
extern void free_EXPR_LIST_node (rtx);
extern void free_INSN_LIST_node (rtx);
extern rtx_insn_list *alloc_INSN_LIST (rtx, rtx);
extern rtx_insn_list *copy_INSN_LIST (rtx_insn_list *);
extern rtx_insn_list *concat_INSN_LIST (rtx_insn_list *, rtx_insn_list *);
extern rtx_expr_list *alloc_EXPR_LIST (int, rtx, rtx);
extern void remove_free_INSN_LIST_elem (rtx_insn *, rtx_insn_list **);
extern rtx remove_list_elem (rtx, rtx *);
extern rtx_insn *remove_free_INSN_LIST_node (rtx_insn_list **);
extern rtx remove_free_EXPR_LIST_node (rtx_expr_list **);


/* reginfo.c */

/* Resize reg info.  */
extern bool resize_reg_info (void);
/* Free up register info memory.  */
extern void free_reg_info (void);
extern void init_subregs_of_mode (void);
extern void finish_subregs_of_mode (void);

/* recog.c */
extern rtx extract_asm_operands (rtx);
extern int asm_noperands (const_rtx);
extern const char *decode_asm_operands (rtx, rtx *, rtx **, const char **,
					enum machine_mode *, location_t *);
extern void get_referenced_operands (const char *, bool *, unsigned int);

extern enum reg_class reg_preferred_class (int);
extern enum reg_class reg_alternate_class (int);
extern enum reg_class reg_allocno_class (int);
extern void setup_reg_classes (int, enum reg_class, enum reg_class,
			       enum reg_class);

extern void split_all_insns (void);
extern unsigned int split_all_insns_noflow (void);

#define MAX_SAVED_CONST_INT 64
extern GTY(()) rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

#define const0_rtx	(const_int_rtx[MAX_SAVED_CONST_INT])
#define const1_rtx	(const_int_rtx[MAX_SAVED_CONST_INT+1])
#define const2_rtx	(const_int_rtx[MAX_SAVED_CONST_INT+2])
#define constm1_rtx	(const_int_rtx[MAX_SAVED_CONST_INT-1])
extern GTY(()) rtx const_true_rtx;

extern GTY(()) rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

/* Returns a constant 0 rtx in mode MODE.  Integer modes are treated the
   same as VOIDmode.  */

#define CONST0_RTX(MODE) (const_tiny_rtx[0][(int) (MODE)])

/* Likewise, for the constants 1 and 2 and -1.  */

#define CONST1_RTX(MODE) (const_tiny_rtx[1][(int) (MODE)])
#define CONST2_RTX(MODE) (const_tiny_rtx[2][(int) (MODE)])
#define CONSTM1_RTX(MODE) (const_tiny_rtx[3][(int) (MODE)])
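
/* Usage sketch: CONST0_RTX (SImode) and CONST1_RTX (SImode) yield the
   shared (const_int 0) and (const_int 1) objects, while for a vector
   integer mode such as V4SImode (if the target provides it) CONSTM1_RTX
   returns the cached all-ones vector constant.  */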

extern GTY(()) rtx pc_rtx;
extern GTY(()) rtx cc0_rtx;
extern GTY(()) rtx ret_rtx;
extern GTY(()) rtx simple_return_rtx;

/* If HARD_FRAME_POINTER_REGNUM is defined, then a special dummy reg
   is used to represent the frame pointer.  This is because the
   hard frame pointer and the automatic variables are separated by an amount
   that cannot be determined until after register allocation.  We can assume
   that in this case ELIMINABLE_REGS will be defined, one action of which
   will be to eliminate FRAME_POINTER_REGNUM into HARD_FRAME_POINTER_REGNUM.  */
#ifndef HARD_FRAME_POINTER_REGNUM
#define HARD_FRAME_POINTER_REGNUM FRAME_POINTER_REGNUM
#endif

#ifndef HARD_FRAME_POINTER_IS_FRAME_POINTER
#define HARD_FRAME_POINTER_IS_FRAME_POINTER \
  (HARD_FRAME_POINTER_REGNUM == FRAME_POINTER_REGNUM)
#endif

#ifndef HARD_FRAME_POINTER_IS_ARG_POINTER
#define HARD_FRAME_POINTER_IS_ARG_POINTER \
  (HARD_FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM)
#endif

/* Index labels for global_rtl.  */
enum global_rtl_index
{
  GR_STACK_POINTER,
  GR_FRAME_POINTER,
/* For register elimination to work properly these hard_frame_pointer_rtx,
   frame_pointer_rtx, and arg_pointer_rtx must be the same if they refer to
   the same register.  */
#if FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM
  GR_ARG_POINTER = GR_FRAME_POINTER,
#endif
#if HARD_FRAME_POINTER_IS_FRAME_POINTER
  GR_HARD_FRAME_POINTER = GR_FRAME_POINTER,
#else
  GR_HARD_FRAME_POINTER,
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
#if HARD_FRAME_POINTER_IS_ARG_POINTER
  GR_ARG_POINTER = GR_HARD_FRAME_POINTER,
#else
  GR_ARG_POINTER,
#endif
#endif
  GR_VIRTUAL_INCOMING_ARGS,
  GR_VIRTUAL_STACK_ARGS,
  GR_VIRTUAL_STACK_DYNAMIC,
  GR_VIRTUAL_OUTGOING_ARGS,
  GR_VIRTUAL_CFA,
  GR_VIRTUAL_PREFERRED_STACK_BOUNDARY,

  GR_MAX
};

/* Target-dependent globals.  */
struct GTY(()) target_rtl {
  /* All references to the hard registers in global_rtl_index go through
     these unique rtl objects.  On machines where the frame-pointer and
     arg-pointer are the same register, they use the same unique object.

     After register allocation, other rtl objects which used to be pseudo-regs
     may be clobbered to refer to the frame-pointer register.
     But references that were originally to the frame-pointer can be
     distinguished from the others because they contain frame_pointer_rtx.

     When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
     tricky: until register elimination has taken place hard_frame_pointer_rtx
     should be used if it is being set, and frame_pointer_rtx otherwise.  After
     register elimination hard_frame_pointer_rtx should always be used.
     On machines where the two registers are same (most) then these are the
     same.  */
  rtx x_global_rtl[GR_MAX];

  /* A unique representation of (REG:Pmode PIC_OFFSET_TABLE_REGNUM).  */
  rtx x_pic_offset_table_rtx;

  /* A unique representation of (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM).
     This is used to implement __builtin_return_address for some machines;
     see for instance the MIPS port.  */
  rtx x_return_address_pointer_rtx;

  /* Commonly used RTL for hard registers.  These objects are not
     necessarily unique, so we allocate them separately from global_rtl.
     They are initialized once per compilation unit, then copied into
     regno_reg_rtx at the beginning of each function.  */
  rtx x_initial_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

  /* A sample (mem:M stack_pointer_rtx) rtx for each mode M.  */
  rtx x_top_of_stack[MAX_MACHINE_MODE];

  /* Static hunks of RTL used by the aliasing code; these are treated
     as persistent to avoid unnecessary RTL allocations.  */
  rtx x_static_reg_base_value[FIRST_PSEUDO_REGISTER];

  /* The default memory attributes for each mode.  */
  struct mem_attrs *x_mode_mem_attrs[(int) MAX_MACHINE_MODE];

  /* Track if RTL has been initialized.  */
  bool target_specific_initialized;
};

extern GTY(()) struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
extern struct target_rtl *this_target_rtl;
#else
#define this_target_rtl (&default_target_rtl)
#endif

#define global_rtl				\
  (this_target_rtl->x_global_rtl)
#define pic_offset_table_rtx \
  (this_target_rtl->x_pic_offset_table_rtx)
#define return_address_pointer_rtx \
  (this_target_rtl->x_return_address_pointer_rtx)
#define top_of_stack \
  (this_target_rtl->x_top_of_stack)
#define mode_mem_attrs \
  (this_target_rtl->x_mode_mem_attrs)

/* All references to certain hard regs, except those created
   by allocating pseudo regs into them (when that's possible),
   go through these unique rtx objects.  */
#define stack_pointer_rtx       (global_rtl[GR_STACK_POINTER])
#define frame_pointer_rtx       (global_rtl[GR_FRAME_POINTER])
#define hard_frame_pointer_rtx	(global_rtl[GR_HARD_FRAME_POINTER])
#define arg_pointer_rtx		(global_rtl[GR_ARG_POINTER])

#ifndef GENERATOR_FILE
/* Return the attributes of a MEM rtx.  */
static inline struct mem_attrs *
get_mem_attrs (const_rtx x)
{
  struct mem_attrs *attrs;

  attrs = MEM_ATTRS (x);
  if (!attrs)
    attrs = mode_mem_attrs[(int) GET_MODE (x)];
  return attrs;
}
#endif

/* Include the RTL generation functions.  */

#ifndef GENERATOR_FILE
#include "genrtl.h"
#undef gen_rtx_ASM_INPUT
#define gen_rtx_ASM_INPUT(MODE, ARG0)				\
  gen_rtx_fmt_si (ASM_INPUT, (MODE), (ARG0), 0)
#define gen_rtx_ASM_INPUT_loc(MODE, ARG0, LOC)			\
  gen_rtx_fmt_si (ASM_INPUT, (MODE), (ARG0), (LOC))
#endif

/* There are some RTL codes that require special attention; the
   generation functions included above do the raw handling.  If you
   add to this list, modify special_rtx in gengenrtl.c as well.  */

extern rtx_expr_list *gen_rtx_EXPR_LIST (enum machine_mode, rtx, rtx);
extern rtx_insn_list *gen_rtx_INSN_LIST (enum machine_mode, rtx, rtx);
extern rtx_insn *
gen_rtx_INSN (enum machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes);
extern rtx gen_rtx_CONST_INT (enum machine_mode, HOST_WIDE_INT);
extern rtx gen_rtx_CONST_VECTOR (enum machine_mode, rtvec);
extern rtx gen_raw_REG (enum machine_mode, int);
extern rtx gen_rtx_REG (enum machine_mode, unsigned);
extern rtx gen_rtx_SUBREG (enum machine_mode, rtx, int);
extern rtx gen_rtx_MEM (enum machine_mode, rtx);
extern rtx gen_rtx_VAR_LOCATION (enum machine_mode, tree, rtx,
				 enum var_init_status);

#define GEN_INT(N)  gen_rtx_CONST_INT (VOIDmode, (N))

/* Virtual registers are used during RTL generation to refer to locations into
   the stack frame when the actual location isn't known until RTL generation
   is complete.  The routine instantiate_virtual_regs replaces these with
   the proper value, which is normally {frame,arg,stack}_pointer_rtx plus
   a constant.  */

#define FIRST_VIRTUAL_REGISTER	(FIRST_PSEUDO_REGISTER)

/* This points to the first word of the incoming arguments passed on the stack,
   either by the caller or by the callee when pretending it was passed by the
   caller.  */

#define virtual_incoming_args_rtx       (global_rtl[GR_VIRTUAL_INCOMING_ARGS])

#define VIRTUAL_INCOMING_ARGS_REGNUM	(FIRST_VIRTUAL_REGISTER)

/* If FRAME_GROWS_DOWNWARD, this points to immediately above the first
   variable on the stack.  Otherwise, it points to the first variable on
   the stack.  */

#define virtual_stack_vars_rtx	        (global_rtl[GR_VIRTUAL_STACK_ARGS])

#define VIRTUAL_STACK_VARS_REGNUM	((FIRST_VIRTUAL_REGISTER) + 1)
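
/* Illustrative sketch (MODE and OFFSET stand in for whatever the caller
   has): during expansion a frame-slot address is typically built as

       rtx addr = plus_constant (Pmode, virtual_stack_vars_rtx, offset);
       rtx slot = gen_rtx_MEM (mode, addr);

   and instantiate_virtual_regs later rewrites the virtual register into
   the real frame or hard frame pointer plus an adjusted offset.  */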

/* This points to the location of dynamically-allocated memory on the stack
   immediately after the stack pointer has been adjusted by the amount
   desired.  */

#define virtual_stack_dynamic_rtx	(global_rtl[GR_VIRTUAL_STACK_DYNAMIC])

#define VIRTUAL_STACK_DYNAMIC_REGNUM	((FIRST_VIRTUAL_REGISTER) + 2)

/* This points to the location in the stack at which outgoing arguments should
   be written when the stack is pre-pushed (arguments pushed using push
   insns always use sp).  */

#define virtual_outgoing_args_rtx	(global_rtl[GR_VIRTUAL_OUTGOING_ARGS])

#define VIRTUAL_OUTGOING_ARGS_REGNUM	((FIRST_VIRTUAL_REGISTER) + 3)

/* This points to the Canonical Frame Address of the function.  This
   should correspond to the CFA produced by INCOMING_FRAME_SP_OFFSET,
   but is calculated relative to the arg pointer for simplicity; neither
   the frame pointer nor the stack pointer is necessarily fixed relative
   to the CFA until after reload.  */

#define virtual_cfa_rtx			(global_rtl[GR_VIRTUAL_CFA])

#define VIRTUAL_CFA_REGNUM		((FIRST_VIRTUAL_REGISTER) + 4)

#define LAST_VIRTUAL_POINTER_REGISTER	((FIRST_VIRTUAL_REGISTER) + 4)

/* This is replaced by crtl->preferred_stack_boundary / BITS_PER_UNIT
   when finalized.  */

#define virtual_preferred_stack_boundary_rtx \
	(global_rtl[GR_VIRTUAL_PREFERRED_STACK_BOUNDARY])

#define VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM \
					((FIRST_VIRTUAL_REGISTER) + 5)

#define LAST_VIRTUAL_REGISTER		((FIRST_VIRTUAL_REGISTER) + 5)

/* Nonzero if REGNUM is a pointer into the stack frame.  */
#define REGNO_PTR_FRAME_P(REGNUM)		\
  ((REGNUM) == STACK_POINTER_REGNUM		\
   || (REGNUM) == FRAME_POINTER_REGNUM		\
   || (REGNUM) == HARD_FRAME_POINTER_REGNUM	\
   || (REGNUM) == ARG_POINTER_REGNUM		\
   || ((REGNUM) >= FIRST_VIRTUAL_REGISTER	\
       && (REGNUM) <= LAST_VIRTUAL_POINTER_REGISTER))

/* REGNUM never really appearing in the INSN stream.  */
#define INVALID_REGNUM			(~(unsigned int) 0)

/* REGNUM for which no debug information can be generated.  */
#define IGNORED_DWARF_REGNUM            (INVALID_REGNUM - 1)

extern rtx output_constant_def (tree, int);
extern rtx lookup_constant_def (tree);

/* Nonzero after end of reload pass.
   Set to 1 or 0 by reload1.c.  */

extern int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
extern int epilogue_completed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

extern int reload_in_progress;

/* Set to 1 while in lra.  */
extern int lra_in_progress;

/* This macro indicates whether you may create a new
   pseudo-register.  */

#define can_create_pseudo_p() (!reload_in_progress && !reload_completed)
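
/* A minimal sketch of the usual guard (TEMP, MODE and operands[] stand
   in for whatever the caller has at hand):

       if (can_create_pseudo_p ())
         temp = gen_reg_rtx (mode);
       else
         temp = operands[0];

   (gen_reg_rtx allocates a fresh pseudo of MODE.)  */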

#ifdef STACK_REGS
/* Nonzero after end of regstack pass.
   Set to 1 or 0 by reg-stack.c.  */
extern int regstack_completed;
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
extern int cse_not_expected;

/* Translates rtx code to tree code, for those codes needed by
   REAL_ARITHMETIC.  The function returns an int because the caller may not
   know what `enum tree_code' means.  */

extern int rtx_to_tree_code (enum rtx_code);

/* In cse.c */
extern int delete_trivially_dead_insns (rtx_insn *, int);
extern int exp_equiv_p (const_rtx, const_rtx, int, bool);
extern unsigned hash_rtx (const_rtx x, enum machine_mode, int *, int *, bool);

/* In dse.c */
extern bool check_for_inc_dec (rtx_insn *insn);

/* In jump.c */
extern int comparison_dominates_p (enum rtx_code, enum rtx_code);
extern bool jump_to_label_p (const rtx_insn *);
extern int condjump_p (const rtx_insn *);
extern int any_condjump_p (const rtx_insn *);
extern int any_uncondjump_p (const rtx_insn *);
extern rtx pc_set (const rtx_insn *);
extern rtx condjump_label (const rtx_insn *);
extern int simplejump_p (const rtx_insn *);
extern int returnjump_p (const rtx_insn *);
extern int eh_returnjump_p (rtx_insn *);
extern int onlyjump_p (const rtx_insn *);
extern int only_sets_cc0_p (const_rtx);
extern int sets_cc0_p (const_rtx);
extern int invert_jump_1 (rtx_insn *, rtx);
extern int invert_jump (rtx_insn *, rtx, int);
extern int rtx_renumbered_equal_p (const_rtx, const_rtx);
extern int true_regnum (const_rtx);
extern unsigned int reg_or_subregno (const_rtx);
extern int redirect_jump_1 (rtx, rtx);
extern void redirect_jump_2 (rtx, rtx, rtx, int, int);
extern int redirect_jump (rtx, rtx, int);
extern void rebuild_jump_labels (rtx_insn *);
extern void rebuild_jump_labels_chain (rtx_insn *);
extern rtx reversed_comparison (const_rtx, enum machine_mode);
extern enum rtx_code reversed_comparison_code (const_rtx, const_rtx);
extern enum rtx_code reversed_comparison_code_parts (enum rtx_code, const_rtx,
						     const_rtx, const_rtx);
extern void delete_for_peephole (rtx_insn *, rtx_insn *);
extern int condjump_in_parallel_p (const rtx_insn *);

/* In emit-rtl.c.  */
extern int max_reg_num (void);
extern int max_label_num (void);
extern int get_first_label_num (void);
extern void maybe_set_first_label_num (rtx);
extern void delete_insns_since (rtx_insn *);
extern void mark_reg_pointer (rtx, int);
extern void mark_user_reg (rtx);
extern void reset_used_flags (rtx);
extern void set_used_flags (rtx);
extern void reorder_insns (rtx_insn *, rtx_insn *, rtx_insn *);
extern void reorder_insns_nobb (rtx_insn *, rtx_insn *, rtx_insn *);
extern int get_max_insn_count (void);
extern int in_sequence_p (void);
extern void init_emit (void);
extern void init_emit_regs (void);
extern void init_derived_machine_modes (void);
extern void init_emit_once (void);
extern void push_topmost_sequence (void);
extern void pop_topmost_sequence (void);
extern void set_new_first_and_last_insn (rtx_insn *, rtx_insn *);
extern unsigned int unshare_all_rtl (void);
extern void unshare_all_rtl_again (rtx_insn *);
extern void unshare_all_rtl_in_chain (rtx_insn *);
extern void verify_rtl_sharing (void);
extern void add_insn (rtx_insn *);
extern void add_insn_before (rtx, rtx, basic_block);
extern void add_insn_after (rtx, rtx, basic_block);
extern void remove_insn (rtx);
extern rtx_insn *emit (rtx);
extern void delete_insn (rtx);
extern rtx_insn *entry_of_function (void);
extern void emit_insn_at_entry (rtx);
extern void delete_insn_chain (rtx, rtx, bool);
extern rtx_insn *unlink_insn_chain (rtx_insn *, rtx_insn *);
extern void delete_insn_and_edges (rtx_insn *);
extern rtx gen_lowpart_SUBREG (enum machine_mode, rtx);
extern rtx gen_const_mem (enum machine_mode, rtx);
extern rtx gen_frame_mem (enum machine_mode, rtx);
extern rtx gen_tmp_stack_mem (enum machine_mode, rtx);
extern bool validate_subreg (enum machine_mode, enum machine_mode,
			     const_rtx, unsigned int);

/* In combine.c  */
extern unsigned int extended_count (const_rtx, enum machine_mode, int);
extern rtx remove_death (unsigned int, rtx_insn *);
extern void dump_combine_stats (FILE *);
extern void dump_combine_total_stats (FILE *);
extern rtx make_compound_operation (rtx, enum rtx_code);

/* In cfgcleanup.c  */
extern void delete_dead_jumptables (void);

/* In sched-rgn.c.  */
extern void schedule_insns (void);

/* In sched-ebb.c.  */
extern void schedule_ebbs (void);

/* In sel-sched-dump.c.  */
extern void sel_sched_fix_param (const char *param, const char *val);

/* In print-rtl.c */
extern const char *print_rtx_head;
extern void debug (const rtx_def &ref);
extern void debug (const rtx_def *ptr);
extern void debug_rtx (const_rtx);
extern void debug_rtx_list (const rtx_insn *, int);
extern void debug_rtx_range (const rtx_insn *, const rtx_insn *);
extern const_rtx debug_rtx_find (const rtx_insn *, int);
extern void print_mem_expr (FILE *, const_tree);
extern void print_rtl (FILE *, const_rtx);
extern void print_simple_rtl (FILE *, const_rtx);
extern int print_rtl_single (FILE *, const_rtx);
extern int print_rtl_single_with_indent (FILE *, const_rtx, int);
extern void print_inline_rtx (FILE *, const_rtx, int);

/* Functions in sched-vis.c.  FIXME: Ideally these functions would
   not be in sched-vis.c but in rtl.c, because they are no longer used
   only by the scheduler but for all "slim" RTL dumping.  */
extern void dump_value_slim (FILE *, const_rtx, int);
extern void dump_insn_slim (FILE *, const_rtx);
extern void dump_rtl_slim (FILE *, const rtx_insn *, const rtx_insn *,
			   int, int);
extern void print_value (pretty_printer *, const_rtx, int);
extern void print_pattern (pretty_printer *, const_rtx, int);
extern void print_insn (pretty_printer *, const_rtx, int);
extern void rtl_dump_bb_for_graph (pretty_printer *, basic_block);
extern const char *str_pattern_slim (const_rtx);

/* In function.c */
extern void reposition_prologue_and_epilogue_notes (void);
extern int prologue_epilogue_contains (const_rtx);
extern int sibcall_epilogue_contains (const_rtx);
extern void update_temp_slot_address (rtx, rtx);
extern void maybe_copy_prologue_epilogue_insn (rtx, rtx);
extern void set_return_jump_label (rtx);

/* In stmt.c */
extern void expand_null_return (void);
extern void expand_naked_return (void);
extern void emit_jump (rtx);

/* In expr.c */
extern rtx move_by_pieces (rtx, rtx, unsigned HOST_WIDE_INT,
			   unsigned int, int);
extern HOST_WIDE_INT find_args_size_adjust (rtx_insn *);
extern int fixup_args_size_notes (rtx_insn *, rtx_insn *, int);

/* In cfgrtl.c */
extern void print_rtl_with_bb (FILE *, const rtx_insn *, int);
extern rtx_insn *duplicate_insn_chain (rtx_insn *, rtx_insn *);

/* In expmed.c */
extern void init_expmed (void);
extern void expand_inc (rtx, rtx);
extern void expand_dec (rtx, rtx);

/* In lower-subreg.c */
extern void init_lower_subreg (void);

/* In gcse.c */
extern bool can_copy_p (enum machine_mode);
extern bool can_assign_to_reg_without_clobbers_p (rtx);
extern rtx fis_get_condition (rtx_insn *);

/* In ira.c */
#ifdef HARD_CONST
extern HARD_REG_SET eliminable_regset;
#endif
extern void mark_elimination (int, int);

/* In reginfo.c */
extern int reg_classes_intersect_p (reg_class_t, reg_class_t);
extern int reg_class_subset_p (reg_class_t, reg_class_t);
extern void globalize_reg (tree, int);
extern void init_reg_modes_target (void);
extern void init_regs (void);
extern void reinit_regs (void);
extern void init_fake_stack_mems (void);
extern void save_register_info (void);
extern void init_reg_sets (void);
extern void regclass (rtx, int);
extern void reg_scan (rtx_insn *, unsigned int);
extern void fix_register (const char *, int, int);
extern bool invalid_mode_change_p (unsigned int, enum reg_class);

/* In reload1.c */
extern int function_invariant_p (const_rtx);

/* In calls.c */
enum libcall_type
{
  LCT_NORMAL = 0,
  LCT_CONST = 1,
  LCT_PURE = 2,
  LCT_NORETURN = 3,
  LCT_THROW = 4,
  LCT_RETURNS_TWICE = 5
};

extern void emit_library_call (rtx, enum libcall_type, enum machine_mode, int,
			       ...);
extern rtx emit_library_call_value (rtx, rtx, enum libcall_type,
				    enum machine_mode, int, ...);

/* In varasm.c */
extern void init_varasm_once (void);

extern rtx make_debug_expr_from_rtl (const_rtx);

/* In read-rtl.c */
extern bool read_rtx (const char *, rtx *);

/* In alias.c */
extern rtx canon_rtx (rtx);
extern int true_dependence (const_rtx, enum machine_mode, const_rtx);
extern rtx get_addr (rtx);
extern int canon_true_dependence (const_rtx, enum machine_mode, rtx,
				  const_rtx, rtx);
extern int read_dependence (const_rtx, const_rtx);
extern int anti_dependence (const_rtx, const_rtx);
extern int canon_anti_dependence (const_rtx, bool,
				  const_rtx, enum machine_mode, rtx);
extern int output_dependence (const_rtx, const_rtx);
extern int may_alias_p (const_rtx, const_rtx);
extern void init_alias_target (void);
extern void init_alias_analysis (void);
extern void end_alias_analysis (void);
extern void vt_equate_reg_base_value (const_rtx, const_rtx);
extern bool memory_modified_in_insn_p (const_rtx, const_rtx);
extern bool memory_must_be_modified_in_insn_p (const_rtx, const_rtx);
extern bool may_be_sp_based_p (rtx);
extern rtx gen_hard_reg_clobber (enum machine_mode, unsigned int);
extern rtx get_reg_known_value (unsigned int);
extern bool get_reg_known_equiv_p (unsigned int);
extern rtx get_reg_base_value (unsigned int);

#ifdef STACK_REGS
extern int stack_regs_mentioned (const_rtx insn);
#endif

/* In toplev.c */
extern GTY(()) rtx stack_limit_rtx;

/* In predict.c */
extern void invert_br_probabilities (rtx);
extern bool expensive_function_p (int);

/* In var-tracking.c */
extern unsigned int variable_tracking_main (void);

/* In stor-layout.c.  */
extern void get_mode_bounds (enum machine_mode, int, enum machine_mode,
			     rtx *, rtx *);

/* In loop-iv.c  */
extern rtx canon_condition (rtx);
extern void simplify_using_condition (rtx, rtx *, bitmap);

/* In final.c  */
extern unsigned int compute_alignments (void);
extern void update_alignments (vec<rtx> &);
extern int asm_str_count (const char *templ);

struct rtl_hooks
{
  rtx (*gen_lowpart) (enum machine_mode, rtx);
  rtx (*gen_lowpart_no_emit) (enum machine_mode, rtx);
  rtx (*reg_nonzero_bits) (const_rtx, enum machine_mode, const_rtx, enum machine_mode,
			   unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT *);
  rtx (*reg_num_sign_bit_copies) (const_rtx, enum machine_mode, const_rtx, enum machine_mode,
				  unsigned int, unsigned int *);
  bool (*reg_truncated_to_mode) (enum machine_mode, const_rtx);

  /* Whenever you add entries here, make sure you adjust rtlhooks-def.h.  */
};

/* Each pass can provide its own.  */
extern struct rtl_hooks rtl_hooks;

/* ... but then it has to restore these.  */
extern const struct rtl_hooks general_rtl_hooks;

/* Keep this for the nonce.  */
#define gen_lowpart rtl_hooks.gen_lowpart
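
/* A hedged sketch of the intended protocol (pass_rtl_hooks is an
   invented name): a pass installs its own table around the code that
   needs the specialized behavior and restores the defaults afterwards:

       rtl_hooks = pass_rtl_hooks;
       ... run the pass ...
       rtl_hooks = general_rtl_hooks;  */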

extern void insn_locations_init (void);
extern void insn_locations_finalize (void);
extern void set_curr_insn_location (location_t);
extern location_t curr_insn_location (void);
extern bool optimize_insn_for_size_p (void);
extern bool optimize_insn_for_speed_p (void);

/* rtl-error.c */
extern void _fatal_insn_not_found (const_rtx, const char *, int, const char *)
     ATTRIBUTE_NORETURN;
extern void _fatal_insn (const char *, const_rtx, const char *, int, const char *)
     ATTRIBUTE_NORETURN;

#define fatal_insn(msgid, insn) \
	_fatal_insn (msgid, insn, __FILE__, __LINE__, __FUNCTION__)
#define fatal_insn_not_found(insn) \
	_fatal_insn_not_found (insn, __FILE__, __LINE__, __FUNCTION__)
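
/* For example, a recognizer or output routine that hits an rtx it cannot
   handle typically does

       default:
         fatal_insn ("unrecognizable insn:", insn);

   which dumps INSN together with the file, line and function of the call
   site and does not return.  */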

/* reginfo.c */
extern tree GTY(()) global_regs_decl[FIRST_PSEUDO_REGISTER];


#endif /* ! GCC_RTL_H */