/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 that defaults UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that defaults UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (last_object);
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

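/* Usage sketch added for exposition (not part of the original sources).
   The change-group machinery above is typically driven like this, here
   swapping in hypothetical NEW_SRC and NEW_DEST rtxes on a single-SET
   insn:

     validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
     validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
     if (! apply_change_group ())
       ;  -- both changes were backed out and INSN is untouched

   With IN_GROUP == 0 the same call validates and commits (or cancels)
   the single change immediately.  */
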
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

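/* Usage sketch added for exposition (not part of the original sources).
   verify_changes, confirm_change_group and cancel_changes expose the two
   phases of apply_change_group separately, so a pass can test a tentative
   group before committing:

     num = num_validated_changes ();
     ... queue more changes with validate_change (..., 1) ...
     if (verify_changes (num))
       confirm_change_group ();		-- commit; DF info is rescanned
     else
       cancel_changes (num);		-- back out changes NUM and up  */
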
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_INT_P (XEXP (x, 1))
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
	         must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object, 
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object, 
                                    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object, 
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

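/* Usage sketch added for exposition (not part of the original sources).
   A register-renaming style pass might use the routine above to retarget
   INSN from one hypothetical pseudo to another:

     if (validate_replace_rtx (old_reg, new_reg, insn))
       ... INSN now mentions NEW_REG everywhere and still recognizes ...
     else
       ... nothing was changed ...  */
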
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
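
/* Usage sketch added for exposition (not part of the original sources).
   Unlike validate_replace_rtx, the group variant above only queues the
   substitutions; the caller decides when to validate.  It also walks the
   pattern with note_uses, so destinations being assigned are skipped:

     validate_replace_src_group (old_reg, new_reg, insn);
     if (! apply_change_group ())
       ... substitution in the used (source) positions failed ...  */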

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
	return 1;
    }

  return 0;
}

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

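/* Illustrative note added for exposition (not part of the original
   sources).  Predicates such as register_operand and general_operand are
   referenced by name from machine descriptions; for a hypothetical
   pattern

     (define_insn "*movsi_example"
       [(set (match_operand:SI 0 "register_operand" "=r")
	     (match_operand:SI 1 "general_operand" "g"))]
       ""
       "...")

   the generated recognizer makes calls such as
   register_operand (operands[0], SImode) when matching an insn.  */
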
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (CONST_INT_P (op)
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

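/* Illustrative note added for exposition (not part of the original
   sources).  On a STACK_GROWS_DOWNWARD target, push_operand accepts a
   plain word push such as

     (mem:SI (pre_dec:SI (reg/f:SI sp)))

   and, when PUSH_ROUNDING pads the mode, the equivalent PRE_MODIFY form

     (mem:HI (pre_modify:SI (reg:SI sp)
			    (plus:SI (reg:SI sp) (const_int -4))))

   where -4 stands for the hypothetical rounded size.  */
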
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.legitimate_address_p (mode, addr, 0);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

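/* Illustrative note added for exposition (not part of the original
   sources).  indirect_operand accepts e.g. (mem:SI (reg:SI 100)), since
   the address (reg:SI 100) is itself a general_operand, but rejects
   (mem:SI (plus:SI (reg:SI 100) (const_int 8))) because a PLUS address
   is not a general_operand.  */
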
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
	         then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}

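/* Illustrative note added for exposition (not part of the original
   sources).  The three body shapes recognized above correspond to asm
   statements roughly like these:

     asm ("..." : : "r" (x));			-- bare ASM_OPERANDS
     asm ("..." : "=r" (y) : "r" (x));		-- SET of one output
     asm ("..." : "=r" (y), "=r" (z) : ...);	-- PARALLEL of SETs,
						   plus any CLOBBERs  */
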
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes,
		     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}

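/* Usage sketch added for exposition (not part of the original sources).
   Callers normally size the output arrays with asm_noperands first, as
   check_asm_operands above does:

     int n = asm_noperands (body);
     rtx *ops = XALLOCAVEC (rtx, n);
     const char **cons = XALLOCAVEC (const char *, n);
     const char *templ
       = decode_asm_operands (body, ops, NULL, cons, NULL, NULL);  */
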
1594
/* Check if an asm_operand matches its constraints.
1595
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1596 1597

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;
	case '=':
	case '+':
	case '*':
	case '%':
	case '!':
	case '#':
	case '&':
	case '?':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If the caller provided a constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    result = 1;
	  break;

	case TARGET_MEM_CONSTRAINT:
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case '<':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    result = 1;
	  break;

	case '>':
	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    result = 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
	    result = 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
	    result = 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
	    result = 1;
	  break;

	case 's':
	  if (CONST_INT_P (op)
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* Fall through.  */

	case 'i':
	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
	    result = 1;
	  break;

	case 'n':
	  if (CONST_INT_P (op)
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    result = 1;
	  break;

	case 'I':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
	    result = 1;
	  break;
	case 'J':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
	    result = 1;
	  break;
	case 'K':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
	    result = 1;
	  break;
	case 'L':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
	    result = 1;
	  break;
	case 'M':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
	    result = 1;
	  break;
	case 'N':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
	    result = 1;
	  break;
	case 'O':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
	    result = 1;
	  break;
	case 'P':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
	    result = 1;
	  break;

	case 'X':
	  result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		result = 1;
	    }
#ifdef EXTRA_CONSTRAINT_STR
	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
	    /* Every memory operand can be reloaded to fit.  */
	    result = result || memory_operand (op, VOIDmode);
	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
	    /* Every address operand can be reloaded to fit.  */
	    result = result || address_operand (op, VOIDmode);
	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
	    result = 1;
#endif
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

  return result;
}
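
/* A minimal usage sketch (illustrative only, not part of this file):
   checking a single operand of an inline asm against one constraint
   before reload.  The operand and the "ri" constraint are hypothetical.  */
#if 0
static bool
example_asm_operand_check (rtx op)
{
  /* "ri" accepts a general register or an immediate.  A return value
     > 0 means OP satisfies the constraint, 0 means it cannot, and < 0
     means the check was inconclusive (e.g. a matching-digit constraint
     with no CONSTRAINTS array to resolve it against).  */
  return asm_operand_ok (op, "ri", NULL) > 0;
}
#endif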

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
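
/* A hypothetical sketch (not part of this file) of how the returned
   location can be used: for an address such as (plus (reg) (const_int 4)),
   the function returns the address of the (const_int 4) slot, which can
   then be rewritten in place.  */
#if 0
static void
example_bump_constant_term (rtx *addr_loc)
{
  rtx *loc = find_constant_term_loc (addr_loc);
  if (loc != 0)
    /* Add 8 to the displacement without rebuilding the whole address.  */
    *loc = plus_constant (*loc, 8);
}
#endif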

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
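
/* Illustrative only (not part of this file): (post_inc (reg)) is
   mode-dependent, because the increment amount equals the access size,
   while (plus (reg) (const_int 4)) normally is not.  A hypothetical
   caller might use the predicate to decide whether a MEM's address can
   be re-used for a narrower access.  */
#if 0
static bool
example_mem_ok_for_narrowing (rtx mem)
{
  return MEM_P (mem) && !mode_dependent_address_p (XEXP (mem, 0));
}
#endif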

/* Like extract_insn, but save the insn extracted and don't extract again
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}

/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  if (noperands > 0)
	    {
	      const char *p =  recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* A VOIDmode match_operand gets its mode from the real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  if (INSN_CODE (insn) < 0)
    for (i = 0; i < recog_data.n_alternatives; i++)
      recog_data.alternative_enabled_p[i] = true;
  else
    {
      recog_data.insn = insn;
      for (i = 0; i < recog_data.n_alternatives; i++)
	{
	  which_alternative = i;
	  recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
	}
    }

  recog_data.insn = NULL;
  which_alternative = -1;
}
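
/* Illustrative only (not part of this file): the usual pattern in a
   pass that wants to look at an insn's operands through recog_data.  */
#if 0
static void
example_walk_operands (rtx insn)
{
  int i;

  extract_insn (insn);
  /* recog_data now describes INSN: the operand rtxes, their modes,
     and the constraint string of each operand.  */
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] != OP_OUT)
      ; /* e.g. analyze input operand recog_data.operand[i] here.  */
}
#endif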

/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
				 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
	{
	  op_alt[j].cl = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  if (!recog_data.alternative_enabled_p[j])
	    {
	      p = skip_alternative (p);
	      continue;
	    }

	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p;
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    char *end;
		    op_alt[j].matches = strtoul (p, &end, 10);
		    recog_op_alt[op_alt[j].matches][j].matched = i;
		    p = end;
		  }
		  continue;

		case TARGET_MEM_CONSTRAINT:
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[j].is_address = 1;
		  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
		  break;

		case 'g':
		case 'r':
		  op_alt[j].cl =
		   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
		  break;

		default:
		  if (EXTRA_MEMORY_CONSTRAINT (c, p))
		    {
		      op_alt[j].memory_ok = 1;
		      break;
		    }
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    {
		      op_alt[j].is_address = 1;
		      op_alt[j].cl
			= (reg_class_subunion
			   [(int) op_alt[j].cl]
			   [(int) base_reg_class (VOIDmode, ADDRESS,
						  SCRATCH)]);
		      break;
		    }

		  op_alt[j].cl
		    = (reg_class_subunion
		       [(int) op_alt[j].cl]
		       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
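
/* Illustrative only (not part of this file): after extract_insn and
   preprocess_constraints, recog_op_alt summarizes each alternative.
   A hypothetical query: find an enabled alternative that allows
   operand OPNO in memory.  */
#if 0
static int
example_find_memory_alternative (int opno)
{
  int j;

  for (j = 0; j < recog_data.n_alternatives; j++)
    if (recog_data.alternative_enabled_p[j]
	&& (recog_op_alt[opno][j].memory_ok
	    || recog_op_alt[opno][j].anything_ok))
      return j;
  return -1;
}
#endif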

/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive nonzero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this_op, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!recog_data.alternative_enabled_p[which_alternative])
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (UNARY_P (op))
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '?':  case '!': case '*':  case '%':
	      case '=':  case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0':  case '1':  case '2':  case '3':  case '4':
	      case '5':  case '6':  case '7':  case '8':  case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this_op = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case TARGET_MEM_CONSTRAINT:
		/* Memory operands must be valid, to the extent
		   required by STRICT.  */
		if (MEM_P (op))
		  {
		    if (strict > 0
			&& !strict_memory_address_p (GET_MODE (op),
						     XEXP (op, 0)))
		      break;
		    if (strict == 0
			&& !memory_address_p (GET_MODE (op), XEXP (op, 0)))
		      break;
		    win = 1;
		  }
		/* Before reload, accept what reload can turn into mem.  */
		else if (strict < 0 && CONSTANT_P (op))
		  win = 1;
		/* During reload, accept a pseudo.  */
		else if (reload_in_progress && REG_P (op)
			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
		  win = 1;
		break;

	      case '<':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
		  win = 1;
		break;

	      case 's':
		if (CONST_INT_P (op)
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
		/* Fall through.  */
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (CONST_INT_P (op)
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		if (CONST_INT_P (op)
		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
		  win = 1;
		break;

	      case 'V':
		if (MEM_P (op)
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || MEM_P (op)))
			|| (reload_in_progress
			    && !(REG_P (op)
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || MEM_P (op)))
		    /* During reload, accept a pseudo.  */
		    || (reload_in_progress && REG_P (op)
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  enum reg_class cl;

		  cl = (c == 'r'
			   ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
		        win = 1;
		    }
#ifdef EXTRA_CONSTRAINT_STR
		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
		    win = 1;

		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* During reload, accept a pseudo.  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
#endif
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0  && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this_op];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
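
/* Illustrative only (not part of this file): the common
   recognize/extract/constrain sequence used when validating an insn.  */
#if 0
static bool
example_insn_matches_constraints (rtx insn)
{
  if (recog_memoized (insn) < 0)
    return false;
  extract_insn (insn);
  /* Checking is strict only once reload has completed.  */
  return constrain_operands (reload_completed) != 0;
}
#endif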

/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CL when its regno is offset by OFFSET
   and changed to mode MODE.
   If OPERAND occupies multiple hard regs, all of them must be in CL.  */

int
reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
		  enum machine_mode mode)
{
  int regno = REGNO (operand);

  if (cl == NO_REGS)
    return 0;

  return (regno < FIRST_PSEUDO_REGISTER
	  && in_hard_reg_set_p (reg_class_contents[(int) cl],
				mode, regno + offset));
}

/* Split a single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return the last insn in the sequence if
   successful, or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL_RTX;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
	}
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }

  return last;
}

/* Split all insns in the function.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		      delete_insn_and_edges (insn);
		}
	      else
		{
		  rtx last = split_insn (insn);
		  if (last)
		    {
		      /* The split sequence may include a barrier, but the
			 BB boundary we are interested in will be set to
			 the previous one.  */

		      while (BARRIER_P (last))
			last = PREV_INSN (last);
		      SET_BIT (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}

/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles LIBCALL blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
  return 0;
}
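
/* Illustrative only (not part of this file): the splits applied above
   come from define_split (or define_insn_and_split) patterns in the
   target .md file.  Schematically, with hypothetical operands:

     (define_split
       [(set (match_operand:DI 0 "register_operand" "")
	     (match_operand:DI 1 "register_operand" ""))]
       "reload_completed"
       [(set (match_dup 2) (match_dup 3))
	(set (match_dup 4) (match_dup 5))]
       "/* set up operands[2..5] from operands[0..1] */")

   try_split matches such a pattern against one insn and replaces it
   with the emitted sequence.  */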

#ifdef HAVE_peephole2
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;
/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB	pc_rtx

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
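
/* Illustrative only (not part of this file): peep2_regno_dead_p and
   peep2_reg_dead_p are typically called from the condition of a
   define_peephole2 in a target .md file.  Schematically:

     (define_peephole2
       [(set (match_operand 0 "register_operand" "")
	     (match_operand 1 "memory_operand" ""))
	(set (match_operand 2 "register_operand" "")
	     (match_dup 0))]
       "peep2_reg_dead_p (2, operands[0])"
       [(set (match_dup 2) (match_dup 1))])

   Here the two matched insns are fused when the intermediate register
   dies after the second one.  */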

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of the insn at peep2
   offset FROM and remains available until the end of the insn at offset
   TO.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
	   : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Don't allocate global registers.  */
      if (global_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
	continue;
      if (! targetm.hard_regno_scratch_ok (regno))
	continue;

      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  add_to_hard_reg_set (reg_set, mode, regno);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
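
/* Illustrative only (not part of this file): this function backs the
   (match_scratch ...) construct in define_peephole2 patterns.  The
   generated recognizer calls it roughly as

     operands[3] = peep2_find_free_register (0, 1, "r", SImode,
					     &regs_allocated);

   where 0 and 1 bracket the matched insns, "r" is the scratch
   constraint, and REGS_ALLOCATED (a hypothetical name here) collects
   registers already handed out.  A NULL_RTX result makes the peephole
   fail to match.  */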

/* Forget all currently tracked instructions, only remember current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last holds invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL_RTX;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}

/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx insn, prev;
  bitmap live;
  int i;
  basic_block bb;
  bool do_cleanup_cfg = false;
  bool do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE (bb)
    {
      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_OUT (bb));
      df_simulate_initialize_backwards (bb, live);
      peep2_reinit_state (live);

      for (insn = BB_END (bb); ; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (INSN_P (insn))
	    {
	      rtx attempt, before_try, x;
	      int match_len;
	      rtx note;
	      bool was_call = false;

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
	      if (peep2_current_count < MAX_INSNS_PER_PEEP2
		  && peep2_insn_data[peep2_current].insn == NULL_RTX)
		peep2_current_count++;
	      peep2_insn_data[peep2_current].insn = insn;
	      df_simulate_one_insn_backwards (bb, insn, live);
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      if (RTX_FRAME_RELATED_P (insn))
		{
		  /* If an insn has RTX_FRAME_RELATED_P set, peephole
		     substitution would lose the
		     REG_FRAME_RELATED_EXPR that is attached.  */
		  peep2_reinit_state (live);
		  attempt = NULL;
		}
	      else
		/* Match the peephole.  */
		attempt = peephole2_insns (PATTERN (insn), insn, &match_len);

	      if (attempt != NULL)
		{
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in the new sequence and copy our CALL_INSN_FUNCTION_USAGE
		     and other cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
		      int j;
		      rtx old_insn, new_insn, note;

		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
		      if (!CALL_P (old_insn))
			continue;
		      was_call = true;

		      new_insn = attempt;
		      while (new_insn != NULL_RTX)
			{
			  if (CALL_P (new_insn))
			    break;
			  new_insn = NEXT_INSN (new_insn);
			}

		      gcc_assert (new_insn != NULL_RTX);

		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_NORETURN:
			  case REG_SETJMP:
			    add_reg_note (new_insn, REG_NOTE_KIND (note),
					  XEXP (note, 0));
			    break;
			  default:
			    /* Discard all other reg notes.  */
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
			  gcc_assert (!CALL_P (old_insn));
			}
		      break;
		    }

		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  note = find_reg_note (peep2_insn_data[i].insn,
					REG_EH_REGION, NULL_RTX);

		  /* Replace the old sequence with the new.  */
		  attempt = emit_insn_after_setloc (attempt,
						    peep2_insn_data[i].insn,
				       INSN_LOCATOR (peep2_insn_data[i].insn));
		  before_try = PREV_INSN (insn);
		  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

		  /* Re-insert the EH_REGION notes.  */
		  if (note || (was_call && nonlocal_goto_handler_labels))
		    {
		      edge eh_edge;
		      edge_iterator ei;

		      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
			  break;

		      for (x = attempt ; x != before_try ; x = PREV_INSN (x))
			if (CALL_P (x)
			    || (flag_non_call_exceptions
				&& may_trap_p (PATTERN (x))
				&& !find_reg_note (x, REG_EH_REGION, NULL)))
			  {
			    if (note)
			      add_reg_note (x, REG_EH_REGION, XEXP (note, 0));

			    if (x != BB_END (bb) && eh_edge)
			      {
				edge nfte, nehe;
				int flags;

				nfte = split_block (bb, x);
				flags = (eh_edge->flags
					 & (EDGE_EH | EDGE_ABNORMAL));
				if (CALL_P (x))
				  flags |= EDGE_ABNORMAL_CALL;
				nehe = make_edge (nfte->src, eh_edge->dest,
						  flags);

				nehe->probability = eh_edge->probability;
				nfte->probability
				  = REG_BR_PROB_BASE - nehe->probability;

			        do_cleanup_cfg |= purge_dead_edges (nfte->dest);
				bb = nfte->src;
				eh_edge = nehe;
			      }
			  }

		      /* Converting a possibly trapping insn to a
			 non-trapping one is possible.  Zap dummy outgoing
			 edges.  */
		      do_cleanup_cfg |= purge_dead_edges (bb);
		    }

#ifdef HAVE_conditional_execution
		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
		  peep2_current_count = 0;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  bitmap_copy (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence.  */
		  x = attempt;
		  do
		    {
		      if (INSN_P (x))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  if (peep2_current_count < MAX_INSNS_PER_PEEP2
			      && peep2_insn_data[i].insn == NULL_RTX)
			    peep2_current_count++;
			  peep2_insn_data[i].insn = x;
			  df_insn_rescan (x);
			  df_simulate_one_insn_backwards (bb, x, live);
			  bitmap_copy (peep2_insn_data[i].live_before, live);
			}
		      x = PREV_INSN (x);
		    }
		  while (x != prev);

		  peep2_current = i;
#endif

		  /* If we generated a jump instruction, it won't have
		     JUMP_LABEL set.  Recompute after we're done.  */
		  for (x = attempt; x != before_try; x = PREV_INSN (x))
		    if (JUMP_P (x))
		      {
		        do_rebuild_jump_labels = true;
			break;
		      }
		}
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
}
#endif /* HAVE_peephole2 */

/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data, not the address operand(s), of the store.  IN_INSN and OUT_INSN
   must each be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;
  rtx out_pat, in_pat;
  rtx out_exp, in_exp;
  int i, j;

  in_set = single_set (in_insn);
  if (in_set)
    {
      if (!MEM_P (SET_DEST (in_set)))
	return false;

      out_set = single_set (out_insn);
      if (out_set)
        {
          if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
            return false;
        }
      else
        {
          out_pat = PATTERN (out_insn);

	  if (GET_CODE (out_pat) != PARALLEL)
	    return false;

          for (i = 0; i < XVECLEN (out_pat, 0); i++)
          {
            out_exp = XVECEXP (out_pat, 0, i);

            if (GET_CODE (out_exp) == CLOBBER)
              continue;

            gcc_assert (GET_CODE (out_exp) == SET);

            if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
              return false;
          }
      }
3376 3377 3378
    }
  else
    {
3379 3380
      in_pat = PATTERN (in_insn);
      gcc_assert (GET_CODE (in_pat) == PARALLEL);
3381

3382
      for (i = 0; i < XVECLEN (in_pat, 0); i++)
3383
	{
3384
	  in_exp = XVECEXP (in_pat, 0, i);
3385

3386
	  if (GET_CODE (in_exp) == CLOBBER)
3387 3388
	    continue;

3389
	  gcc_assert (GET_CODE (in_exp) == SET);
3390

3391
	  if (!MEM_P (SET_DEST (in_exp)))
3392
	    return false;
3393 3394 3395 3396 3397 3398 3399 3400 3401 3402 3403 3404 3405 3406 3407 3408 3409 3410 3411 3412 3413 3414 3415 3416 3417 3418

          out_set = single_set (out_insn);
          if (out_set)
            {
              if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
                return false;
            }
          else
            {
              out_pat = PATTERN (out_insn);
              gcc_assert (GET_CODE (out_pat) == PARALLEL);

              for (j = 0; j < XVECLEN (out_pat, 0); j++)
                {
                  out_exp = XVECEXP (out_pat, 0, j);

                  if (GET_CODE (out_exp) == CLOBBER)
                    continue;

                  gcc_assert (GET_CODE (out_exp) == SET);

                  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
                    return false;
                }
            }
        }
3419
    }
3420 3421 3422 3423

  return true;
}
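
/* For example, a machine description can name this predicate as the
   guard of a bypass.  A hypothetical sketch (the reservation names
   "store" and "load" are illustrative, not taken from any real target):

     (define_bypass 1 "store" "load" "store_data_bypass_p")

   The reduced latency then applies only when the dependence is on the
   stored data rather than on the store's address operand(s).  */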

/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a
   single or multiple set; for the result to be meaningful IN_INSN should be
   a single_set, but for convenience of insn categorization it may be any
   JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
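
/* Like store_data_bypass_p, this is meant to be named as the guard of
   a define_bypass.  A hypothetical sketch (the reservation names are
   illustrative only):

     (define_bypass 1 "alu" "cond_branch" "if_test_bypass_p")

   so the bypass applies only when the ALU result feeds the branch
   condition rather than a THEN/ELSE arm.  */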

static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

struct rtl_opt_pass pass_peephole2 =
{
 {
  RTL_PASS,
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_all_insns =
{
 {
  RTL_PASS,
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_after_reload =
{
 {
  RTL_PASS,
  "split2",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_after_reload,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting, and
     scheduling after reload is not done, they might not be split
     until final, which does not allow splitting when
     HAVE_ATTR_length is defined.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_before_regstack =
{
 {
  RTL_PASS,
  "split3",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_before_regstack, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

struct rtl_opt_pass pass_split_before_sched2 =
{
 {
  RTL_PASS,
  "split4",                             /* name */
  gate_handle_split_before_sched2,      /* gate */
  rest_of_handle_split_before_sched2,   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow |
  TODO_dump_func                        /* todo_flags_finish */
 }
};

/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct rtl_opt_pass pass_split_for_shorten_branches =
{
 {
  RTL_PASS,
  "split5",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};