/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
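
/* Usage sketch (illustrative, not part of the original source): a caller
   that has just queued a validate_change on one arm of a commutative X,
   e.g.

       validate_change (insn, &XEXP (x, 0), new_op0, 1);
       canonicalize_change_group (insn, x);

   lets the call above queue the operand swap whenever the edit left X in
   non-canonical operand order.  NEW_OP0 is a hypothetical rtx here.  */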


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insn changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (last_object);
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
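
/* Usage sketch (illustrative, not part of the original source): the
   typical protocol queues several tentative edits with IN_GROUP nonzero
   and then commits or rolls them all back atomically:

       validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
       validate_change (insn, &XEXP (x, 1), const0_rtx, 1);
       if (! apply_change_group ())
	 give_up ();               (every queued change has been undone)

   NEW_SRC, X and GIVE_UP are hypothetical caller-side names.  */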


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
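
/* Illustrative sketch (an assumption, not original text): a caller can
   checkpoint the group with num_validated_changes and later retract only
   the changes queued after the checkpoint:

       int checkpoint = num_validated_changes ();
       validate_change (insn, loc, new_rtx, 1);     (speculative edit)
       if (! verify_changes (checkpoint))
	 cancel_changes (checkpoint);               (earlier changes kept)  */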

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_INT_P (XEXP (x, 1))
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
	         must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
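
/* Usage sketch (illustrative only): replace every use of a pseudo with a
   known constant and keep the result only if INSN still matches some
   pattern:

       if (validate_replace_rtx (pseudo_reg, GEN_INT (42), insn))
	 ...                      (INSN was re-recognized successfully)

   PSEUDO_REG is a hypothetical rtx supplied by the caller.  */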

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
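
/* For illustration (not in the original source): general_operand accepts,
   for example, (reg:SI 100), a (mem:SI ...) whose address is valid for
   the target, and (const_int 42) when MODE is SImode; a volatile MEM is
   rejected unless volatile_ok has been set by init_recog.  */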

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (CONST_INT_P (op)
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
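
/* Illustrative example (assuming STACK_GROWS_DOWNWARD and no extra
   PUSH_ROUNDING slop): a push of an SImode value is the memory reference

       (mem:SI (pre_dec:P (reg:P stack_pointer)))

   while a rounded push instead uses a (pre_modify ...) that adds the
   negated rounded size to the stack pointer, as checked above.  */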

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
1316
indirect_operand (rtx op, enum machine_mode mode)
Richard Kenner committed
1317 1318 1319
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
1320
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
Richard Kenner committed
1321
    {
1322
      int offset = SUBREG_BYTE (op);
Richard Kenner committed
1323 1324
      rtx inner = SUBREG_REG (op);

1325 1326 1327
      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

Richard Kenner committed
1328 1329 1330 1331 1332 1333 1334
      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
Shujing Zhao committed
1335
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
Richard Kenner committed
1336 1337 1338 1339
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

1340
  return (MEM_P (op)
Richard Kenner committed
1341 1342 1343 1344
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

1345 1346 1347 1348 1349 1350 1351 1352 1353 1354 1355 1356 1357 1358 1359 1360 1361 1362 1363 1364 1365 1366 1367 1368 1369 1370
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

Richard Kenner committed
1371 1372 1373 1374
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
1375
comparison_operator (rtx op, enum machine_mode mode)
Richard Kenner committed
1376 1377
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
1378
	  && COMPARISON_P (op));
Richard Kenner committed
1379 1380
}

1381 1382 1383 1384 1385 1386 1387 1388 1389 1390 1391 1392 1393 1394 1395 1396 1397 1398 1399 1400 1401 1402 1403 1404 1405 1406 1407 1408 1409 1410 1411 1412 1413 1414 1415 1416
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}
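
/* For illustration (not original text), the three body shapes recognized
   above are:

       (asm_operands ...)                               no outputs
       (set OUTPUT (asm_operands ...))                  one output
       (parallel [(set OUT0 (asm_operands ...)) ...])   several outputs
							and/or clobbers  */
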

Richard Kenner committed
1417 1418 1419 1420 1421
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
1422
asm_noperands (const_rtx body)
Richard Kenner committed
1423
{
1424 1425 1426 1427 1428 1429 1430 1431 1432
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
Richard Kenner committed
1433
    {
1434 1435
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
Richard Kenner committed
1436
	{
1437
	  /* Multiple output operands, or 1 output plus some clobbers:
H.J. Lu committed
1438
	     body is
1439
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
1440 1441 1442 1443 1444 1445 1446 1447
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }
Richard Kenner committed
1448

1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464
	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
	         then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1465
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
1466 1467
		return -1;
	    }
Richard Kenner committed
1468
	}
1469
      else
1470 1471 1472 1473 1474 1475 1476 1477
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
Richard Kenner committed
1478
    }
1479 1480 1481

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
Richard Kenner committed
1482 1483 1484 1485 1486 1487 1488 1489 1490 1491 1492 1493
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

1494
const char *
1495
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1496 1497
		     const char **constraints, enum machine_mode *modes,
		     location_t *loc)
Richard Kenner committed
1498
{
1499
  int nbase = 0, n, i;
1500
  rtx asmop;
Richard Kenner committed
1501

1502
  switch (GET_CODE (body))
Richard Kenner committed
1503
    {
1504 1505 1506 1507
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;
Richard Kenner committed
1508

1509 1510 1511
    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);
Richard Kenner committed
1512 1513 1514 1515 1516 1517 1518 1519 1520 1521 1522

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
1523 1524
      nbase = 1;
      break;
1525

1526 1527 1528
    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
Richard Kenner committed
1529

1530 1531 1532 1533 1534 1535 1536 1537 1538 1539 1540 1541 1542 1543 1544 1545 1546 1547 1548 1549 1550 1551 1552 1553
	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	break;
      }
Richard Kenner committed
1554

1555 1556
    default:
      gcc_unreachable ();
Richard Kenner committed
1557
    }
1558

1559 1560 1561 1562 1563 1564 1565 1566 1567 1568 1569 1570 1571
  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;
Richard Kenner committed
1572

1573 1574 1575 1576 1577 1578 1579 1580 1581 1582 1583
  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    return 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;
	case '=':
	case '+':
	case '*':
	case '%':
	case '!':
	case '#':
	case '&':
	case '?':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the maching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
1652
	  continue;
1653 1654 1655

	case 'p':
	  if (address_operand (op, VOIDmode))
1656
	    result = 1;
1657 1658
	  break;

1659
	case TARGET_MEM_CONSTRAINT:
1660 1661
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
1662
	    result = 1;
1663 1664 1665 1666
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
1667
	    result = 1;
1668 1669 1670
	  break;

	case '<':
1671
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1672 1673 1674 1675 1676 1677
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

1678
	  if (MEM_P (op)
1679 1680
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
1681
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
1682
	    result = 1;
1683 1684 1685
	  break;

	case '>':
1686
	  if (MEM_P (op)
1687 1688
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
1689
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
1690
	    result = 1;
1691 1692 1693 1694
	  break;

	case 'E':
	case 'F':
1695 1696 1697
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1698
	    result = 1;
1699 1700 1701 1702
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
1703 1704
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
	    result = 1;
1705 1706 1707
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
1708 1709
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
	    result = 1;
1710 1711 1712
	  break;

	case 's':
Shujing Zhao committed
1713
	  if (CONST_INT_P (op)
1714 1715 1716
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
1717
	  /* Fall through.  */
1718 1719

	case 'i':
1720
	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1721
	    result = 1;
1722 1723 1724
	  break;

	case 'n':
Shujing Zhao committed
1725
	  if (CONST_INT_P (op)
1726 1727
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
1728
	    result = 1;
1729 1730 1731
	  break;

	case 'I':
Shujing Zhao committed
1732
	  if (CONST_INT_P (op)
1733 1734
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
	    result = 1;
1735 1736
	  break;
	case 'J':
Shujing Zhao committed
1737
	  if (CONST_INT_P (op)
1738 1739
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
	    result = 1;
1740 1741
	  break;
	case 'K':
Shujing Zhao committed
1742
	  if (CONST_INT_P (op)
1743 1744
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
	    result = 1;
1745 1746
	  break;
	case 'L':
Shujing Zhao committed
1747
	  if (CONST_INT_P (op)
1748 1749
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
	    result = 1;
1750 1751
	  break;
	case 'M':
Shujing Zhao committed
1752
	  if (CONST_INT_P (op)
1753 1754
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
	    result = 1;
1755 1756
	  break;
	case 'N':
Shujing Zhao committed
1757
	  if (CONST_INT_P (op)
1758 1759
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
	    result = 1;
1760 1761
	  break;
	case 'O':
Shujing Zhao committed
1762
	  if (CONST_INT_P (op)
1763 1764
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
	    result = 1;
1765 1766
	  break;
	case 'P':
Shujing Zhao committed
1767
	  if (CONST_INT_P (op)
1768 1769
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
	    result = 1;
1770 1771 1772
	  break;

	case 'X':
1773
	  result = 1;
1774
	  break;
1775 1776 1777

	case 'g':
	  if (general_operand (op, VOIDmode))
1778
	    result = 1;
1779 1780
	  break;

1781 1782 1783
	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
1784
	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1785 1786 1787 1788 1789
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
1790
		result = 1;
1791
	    }
1792
#ifdef EXTRA_CONSTRAINT_STR
Andreas Krebbel committed
1793 1794 1795 1796 1797 1798
	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
	    /* Every memory operand can be reloaded to fit.  */
	    result = result || memory_operand (op, VOIDmode);
	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
	    /* Every address operand can be reloaded to fit.  */
	    result = result || address_operand (op, VOIDmode);
1799 1800
	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
	    result = 1;
1801 1802 1803
#endif
	  break;
	}
1804 1805 1806 1807 1808 1809
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
1810 1811
    }

1812
  return result;
1813
}
Richard Kenner committed
1814 1815 1816 1817 1818

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

1819
rtx *
1820
find_constant_term_loc (rtx *p)
Richard Kenner committed
1821
{
1822 1823
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);
Richard Kenner committed
1824 1825 1826 1827 1828 1829 1830 1831 1832 1833 1834 1835 1836 1837 1838 1839 1840 1841 1842 1843 1844 1845 1846 1847 1848 1849 1850 1851 1852 1853 1854 1855 1856 1857 1858 1859 1860 1861 1862 1863 1864 1865 1866 1867 1868 1869 1870 1871 1872

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
1873
offsettable_memref_p (rtx op)
Richard Kenner committed
1874
{
1875
  return ((MEM_P (op))
1876 1877
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
Richard Kenner committed
1878 1879 1880 1881 1882 1883
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
1884
offsettable_nonstrict_memref_p (rtx op)
Richard Kenner committed
1885
{
1886
  return ((MEM_P (op))
1887 1888
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
Richard Kenner committed
1889 1890 1891
}

/* Return 1 if Y is a memory address which contains no side effects
1892 1893
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.
Richard Kenner committed
1894 1895 1896 1897 1898 1899 1900 1901

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
1902 1903
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
				  addr_space_t as)
Richard Kenner committed
1904
{
1905 1906
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
Richard Kenner committed
1907 1908
  rtx y1 = y;
  rtx *y2;
1909 1910 1911
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
	     : memory_address_addr_space_p);
1912
  unsigned int mode_sz = GET_MODE_SIZE (mode);
Richard Kenner committed
1913 1914 1915 1916 1917 1918 1919 1920 1921 1922

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

1923 1924 1925 1926 1927 1928 1929
  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

Richard Kenner committed
1930 1931 1932 1933 1934 1935 1936 1937
  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
1938
      *y2 = plus_constant (*y2, mode_sz - 1);
Richard Kenner committed
1939 1940
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
1941
      good = (*addressp) (QImode, y, as);
Richard Kenner committed
1942 1943 1944 1945 1946 1947

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

1948
  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
Richard Kenner committed
1949 1950 1951 1952 1953
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
1954 1955
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
1956 1957 1958
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1959 1960 1961 1962
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);
Richard Kenner committed
1963 1964 1965

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
1966
  return (*addressp) (QImode, z, as);
Richard Kenner committed
1967 1968 1969 1970 1971 1972 1973 1974 1975
}

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
1976
mode_dependent_address_p (rtx addr)
Richard Kenner committed
1977
{
1978 1979 1980 1981 1982 1983 1984 1985 1986
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

Richard Kenner committed
1987 1988
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
1989
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
Kaveh R. Ghazi committed
1990
 win: ATTRIBUTE_UNUSED_LABEL
Richard Kenner committed
1991 1992 1993
  return 1;
}

1994 1995 1996 1997 1998
/* Like extract_insn, but save insn extracted and don't extract again, when
   called again for the same insn expecting that recog_data still contain the
   valid information.  This is used primary by gen_attr infrastructure that
   often does extract insn again and again.  */
void
1999
extract_insn_cached (rtx insn)
2000 2001 2002 2003 2004 2005
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
2006

2007
/* Do cached extract_insn, constrain_operands and complain about failures.
2008 2009
   Used by insn_attrtab.  */
void
2010
extract_constrain_insn_cached (rtx insn)
2011 2012 2013 2014 2015 2016
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
2017

2018
/* Do cached constrain_operands and complain about failures.  */
2019
int
2020
constrain_operands_cached (int strict)
2021 2022 2023 2024 2025 2026
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
2027

2028 2029
/* Analyze INSN and fill in recog_data.  */

2030
void
2031
extract_insn (rtx insn)
2032 2033 2034 2035 2036 2037
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

2038 2039 2040
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
2041 2042 2043 2044 2045 2046 2047 2048

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
2049
    case VAR_LOCATION:
2050 2051 2052
      return;

    case SET:
2053 2054 2055 2056
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
2057
    case PARALLEL:
2058 2059 2060 2061 2062 2063
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
2064
    case ASM_OPERANDS:
2065
    asm_insn:
2066
      recog_data.n_operands = noperands = asm_noperands (body);
2067 2068 2069 2070 2071
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
2072
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2073 2074

	  /* Now get the operand values and constraints out of the insn.  */
2075 2076 2077
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
2078
			       recog_data.operand_mode, NULL);
2079 2080
	  if (noperands > 0)
	    {
2081 2082
	      const char *p =  recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
2083
	      while (*p)
2084
		recog_data.n_alternatives += (*p++ == ',');
2085 2086 2087
	    }
	  break;
	}
2088
      fatal_insn_not_found (insn);
2089 2090

    default:
2091
    normal_insn:
2092 2093 2094 2095 2096 2097 2098
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

2099 2100 2101
      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;
2102 2103 2104 2105 2106

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
2107 2108
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2109 2110 2111
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2112 2113
	}
    }
2114
  for (i = 0; i < noperands; i++)
2115 2116 2117 2118
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);
2119

2120
  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2121 2122 2123 2124 2125 2126 2127 2128 2129 2130 2131 2132 2133 2134 2135 2136

  if (INSN_CODE (insn) < 0)
    for (i = 0; i < recog_data.n_alternatives; i++)
      recog_data.alternative_enabled_p[i] = true;
  else
    {
      recog_data.insn = insn;
      for (i = 0; i < recog_data.n_alternatives; i++)
	{
	  which_alternative = i;
	  recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
	}
    }

  recog_data.insn = NULL;
  which_alternative = -1;
2137 2138
}

2139 2140 2141 2142
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
2143
preprocess_constraints (void)
2144 2145 2146
{
  int i;

2147 2148 2149 2150
  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
				 * sizeof (struct operand_alternative)));

2151
  for (i = 0; i < recog_data.n_operands; i++)
2152 2153 2154
    {
      int j;
      struct operand_alternative *op_alt;
2155
      const char *p = recog_data.constraints[i];
2156 2157 2158

      op_alt = recog_op_alt[i];

2159
      for (j = 0; j < recog_data.n_alternatives; j++)
2160
	{
2161
	  op_alt[j].cl = NO_REGS;
2162 2163 2164 2165
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

2166 2167 2168 2169 2170 2171
	  if (!recog_data.alternative_enabled_p[j])
	    {
	      p = skip_alternative (p);
	      continue;
	    }

2172 2173 2174 2175 2176 2177 2178 2179
	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
2180
	      char c = *p;
2181 2182
	      if (c == '#')
		do
2183
		  c = *++p;
2184 2185
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
2186 2187 2188 2189
		{
		  p++;
		  break;
		}
2190 2191 2192 2193 2194 2195 2196 2197 2198 2199 2200 2201 2202 2203 2204 2205 2206 2207 2208

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
2209
		  break;
2210 2211 2212

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
2213 2214
		  {
		    char *end;
2215
		    op_alt[j].matches = strtoul (p, &end, 10);
2216 2217 2218
		    recog_op_alt[op_alt[j].matches][j].matched = i;
		    p = end;
		  }
2219
		  continue;
2220

2221
		case TARGET_MEM_CONSTRAINT:
2222 2223 2224 2225 2226 2227 2228 2229 2230 2231 2232 2233 2234 2235 2236 2237 2238 2239 2240
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
2241
		  op_alt[j].is_address = 1;
2242
		  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2243
		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2244 2245
		  break;

2246 2247 2248 2249
		case 'g':
		case 'r':
		  op_alt[j].cl =
		   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2250 2251 2252
		  break;

		default:
2253
		  if (EXTRA_MEMORY_CONSTRAINT (c, p))
2254 2255 2256 2257
		    {
		      op_alt[j].memory_ok = 1;
		      break;
		    }
2258
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2259 2260
		    {
		      op_alt[j].is_address = 1;
2261
		      op_alt[j].cl
2262
			= (reg_class_subunion
2263
			   [(int) op_alt[j].cl]
2264 2265
			   [(int) base_reg_class (VOIDmode, ADDRESS,
						  SCRATCH)]);
2266 2267 2268
		      break;
		    }

2269
		  op_alt[j].cl
2270
		    = (reg_class_subunion
2271
		       [(int) op_alt[j].cl]
2272
		       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2273 2274
		  break;
		}
2275
	      p += CONSTRAINT_LEN (c, p);
2276 2277 2278 2279
	    }
	}
    }
}
2280

2281
/* Check the operands of an insn against the insn's operand constraints
Richard Kenner committed
2282
   and return 1 if they are valid.
2283 2284
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.
Richard Kenner committed
2285 2286 2287 2288 2289

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

2290
   In addition, when two operands are required to match
Richard Kenner committed
2291 2292 2293 2294 2295 2296 2297 2298
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

2299
   If STRICT is a positive nonzero value, it means that we have been
Richard Kenner committed
2300 2301 2302 2303 2304 2305 2306 2307 2308 2309
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
2310
  int this_op, other;
Richard Kenner committed
2311 2312 2313
};

int
2314
constrain_operands (int strict)
Richard Kenner committed
2315
{
2316
  const char *constraints[MAX_RECOG_OPERANDS];
2317 2318
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
2319
  int c;
Richard Kenner committed
2320 2321 2322 2323

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

2324
  which_alternative = 0;
2325
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
Richard Kenner committed
2326 2327
    return 1;

2328
  for (c = 0; c < recog_data.n_operands; c++)
2329
    {
2330
      constraints[c] = recog_data.constraints[c];
2331 2332
      matching_operands[c] = -1;
    }
Richard Kenner committed
2333

2334
  do
Richard Kenner committed
2335
    {
2336
      int seen_earlyclobber_at = -1;
2337
      int opno;
Richard Kenner committed
2338 2339 2340
      int lose = 0;
      funny_match_index = 0;

2341 2342 2343 2344 2345 2346 2347 2348 2349 2350 2351
      if (!recog_data.alternative_enabled_p[which_alternative])
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

2352
      for (opno = 0; opno < recog_data.n_operands; opno++)
Richard Kenner committed
2353
	{
2354
	  rtx op = recog_data.operand[opno];
Richard Kenner committed
2355
	  enum machine_mode mode = GET_MODE (op);
2356
	  const char *p = constraints[opno];
Richard Kenner committed
2357 2358 2359
	  int offset = 0;
	  int win = 0;
	  int val;
2360
	  int len;
Richard Kenner committed
2361

2362 2363
	  earlyclobber[opno] = 0;

2364
	  /* A unary operator may be accepted by the predicate, but it
2365
	     is irrelevant for matching constraints.  */
2366
	  if (UNARY_P (op))
2367 2368
	    op = XEXP (op, 0);

Richard Kenner committed
2369 2370
	  if (GET_CODE (op) == SUBREG)
	    {
2371
	      if (REG_P (SUBREG_REG (op))
Richard Kenner committed
2372
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2373 2374 2375 2376
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
Richard Kenner committed
2377 2378 2379 2380 2381 2382 2383 2384
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

2385 2386
	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
Richard Kenner committed
2387
	      {
2388 2389 2390 2391 2392 2393 2394
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

Jeff Law committed
2395 2396
	      case '?':  case '!': case '*':  case '%':
	      case '=':  case '+':
Richard Kenner committed
2397 2398
		break;

2399 2400 2401
	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
2402
		do
2403
		  p++;
2404 2405
		while (*p && *p != ',');
		len = 0;
2406 2407
		break;

2408 2409
	      case '&':
		earlyclobber[opno] = 1;
2410 2411
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
2412 2413
		break;

Jeff Law committed
2414 2415
	      case '0':  case '1':  case '2':  case '3':  case '4':
	      case '5':  case '6':  case '7':  case '8':  case '9':
2416 2417 2418 2419 2420 2421 2422 2423 2424 2425 2426 2427 2428
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

2429
		  match = strtoul (p, &end, 10);
2430 2431 2432 2433 2434 2435 2436 2437 2438 2439 2440
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
2441
		      if (UNARY_P (op1))
2442
			op1 = XEXP (op1, 0);
2443
		      if (UNARY_P (op2))
2444 2445 2446 2447 2448 2449 2450 2451 2452 2453 2454 2455 2456 2457 2458 2459
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
2460
		      funny_match[funny_match_index].this_op = opno;
2461 2462 2463
		      funny_match[funny_match_index++].other = match;
		    }
		}
2464
		len = 0;
Richard Kenner committed
2465 2466 2467 2468
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
Richard Kenner committed
2469 2470 2471
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
Richard Kenner committed
2472
		if (strict <= 0
2473
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
2474
						 op)))
Richard Kenner committed
2475 2476 2477 2478
		  win = 1;
		break;

		/* No need to check general_operand again;
2479 2480 2481
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
Richard Kenner committed
2482 2483 2484
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
2485 2486 2487 2488 2489 2490 2491 2492 2493 2494
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
Richard Kenner committed
2495 2496 2497 2498
		  win = 1;
		break;

	      case 'X':
Mike Stump committed
2499 2500 2501
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
Richard Kenner committed
2502 2503 2504
		win = 1;
		break;

2505
	      case TARGET_MEM_CONSTRAINT:
2506 2507
		/* Memory operands must be valid, to the extent
		   required by STRICT.  */
2508
		if (MEM_P (op))
2509 2510
		  {
		    if (strict > 0
2511 2512 2513
			&& !strict_memory_address_addr_space_p
			     (GET_MODE (op), XEXP (op, 0),
			      MEM_ADDR_SPACE (op)))
2514 2515
		      break;
		    if (strict == 0
2516 2517 2518
			&& !memory_address_addr_space_p
			     (GET_MODE (op), XEXP (op, 0),
			      MEM_ADDR_SPACE (op)))
2519 2520 2521 2522 2523 2524 2525
		      break;
		    win = 1;
		  }
		/* Before reload, accept what reload can turn into mem.  */
		else if (strict < 0 && CONSTANT_P (op))
		  win = 1;
		/* During reload, accept a pseudo  */
2526
		else if (reload_in_progress && REG_P (op)
2527
			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
Richard Kenner committed
2528 2529 2530 2531
		  win = 1;
		break;

	      case '<':
2532
		if (MEM_P (op)
Richard Kenner committed
2533 2534 2535 2536 2537 2538
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
2539
		if (MEM_P (op)
Richard Kenner committed
2540 2541 2542 2543 2544 2545 2546
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
	      case 'F':
2547 2548 2549
		if (GET_CODE (op) == CONST_DOUBLE
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
Richard Kenner committed
2550 2551 2552 2553 2554 2555
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
2556
		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
Richard Kenner committed
2557 2558 2559 2560
		  win = 1;
		break;

	      case 's':
Shujing Zhao committed
2561
		if (CONST_INT_P (op)
Richard Kenner committed
2562 2563 2564 2565 2566 2567 2568 2569 2570
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
Shujing Zhao committed
2571
		if (CONST_INT_P (op)
Richard Kenner committed
2572 2573 2574 2575 2576 2577 2578 2579 2580 2581 2582 2583 2584
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
Shujing Zhao committed
2585
		if (CONST_INT_P (op)
2586
		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
Richard Kenner committed
2587 2588 2589 2590
		  win = 1;
		break;

	      case 'V':
2591
		if (MEM_P (op)
2592 2593
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
2594
			    && !(CONSTANT_P (op) || MEM_P (op)))
2595
			|| (reload_in_progress
2596
			    && !(REG_P (op)
2597
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
Richard Kenner committed
2598 2599 2600 2601 2602 2603 2604 2605
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
2606
			&& (CONSTANT_P (op) || MEM_P (op)))
2607
		    /* During reload, accept a pseudo  */
2608
		    || (reload_in_progress && REG_P (op)
2609
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
Richard Kenner committed
2610 2611 2612 2613
		  win = 1;
		break;

	      default:
2614
		{
2615
		  enum reg_class cl;
2616

2617
		  cl = (c == 'r'
2618
			   ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2619
		  if (cl != NO_REGS)
2620 2621 2622
		    {
		      if (strict < 0
			  || (strict == 0
2623
			      && REG_P (op)
2624 2625
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
2626
			  || (REG_P (op)
2627
			      && reg_fits_class_p (op, cl, offset, mode)))
2628 2629
		        win = 1;
		    }
2630 2631
#ifdef EXTRA_CONSTRAINT_STR
		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
2632
		    win = 1;
2633

2634 2635
		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
			   /* Every memory operand can be reloaded to fit.  */
2636
			   && ((strict < 0 && MEM_P (op))
2637 2638 2639 2640
			       /* Before reload, accept what reload can turn
				  into mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* During reload, accept a pseudo  */
2641
			       || (reload_in_progress && REG_P (op)
2642 2643 2644 2645 2646 2647
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
2648 2649 2650
#endif
		  break;
		}
Richard Kenner committed
2651
	      }
2652
	  while (p += len, c);
Richard Kenner committed
2653 2654 2655 2656 2657 2658 2659 2660 2661 2662 2663

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
2664 2665 2666 2667 2668
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

2669 2670 2671 2672
	  if (strict > 0  && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
2673 2674 2675 2676
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
2677
		  && REG_P (recog_data.operand[eopno]))
2678
		for (opno = 0; opno < recog_data.n_operands; opno++)
2679
		  if ((MEM_P (recog_data.operand[opno])
2680
		       || recog_data.operand_type[opno] != OP_OUT)
2681
		      && opno != eopno
Mike Stump committed
2682
		      /* Ignore things like match_operator operands.  */
2683
		      && *recog_data.constraints[opno] != 0
2684
		      && ! (matching_operands[opno] == eopno
2685 2686 2687 2688
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
2689 2690 2691
		    lose = 1;

	  if (! lose)
Richard Kenner committed
2692
	    {
2693 2694
	      while (--funny_match_index >= 0)
		{
2695
		  recog_data.operand[funny_match[funny_match_index].other]
2696
		    = recog_data.operand[funny_match[funny_match_index].this_op];
2697 2698 2699
		}

	      return 1;
Richard Kenner committed
2700 2701 2702 2703 2704
	    }
	}

      which_alternative++;
    }
2705
  while (which_alternative < recog_data.n_alternatives);
Richard Kenner committed
2706

2707
  which_alternative = -1;
Richard Kenner committed
2708 2709 2710
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
2711
    return constrain_operands (-1);
Richard Kenner committed
2712 2713 2714 2715 2716
  else
    return 0;
}

/* Return 1 iff OPERAND (assumed to be a REG rtx)
2717
   is a hard reg in class CLASS when its regno is offset by OFFSET
Richard Kenner committed
2718 2719 2720 2721
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
2722
reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2723
		  enum machine_mode mode)
Richard Kenner committed
2724
{
2725
  int regno = REGNO (operand);
2726 2727 2728 2729

  if (cl == NO_REGS)
    return 0;

2730 2731 2732
  return (regno < FIRST_PSEUDO_REGISTER
	  && in_hard_reg_set_p (reg_class_contents[(int) cl],
				mode, regno + offset));
Richard Kenner committed
2733
}
2734

2735 2736 2737 2738
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

2739
static rtx
2740
split_insn (rtx insn)
2741
{
2742 2743 2744
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);
2745
  rtx insn_set, last_set, note;
2746 2747 2748 2749

  if (last == insn)
    return NULL_RTX;

2750 2751 2752 2753 2754 2755 2756 2757 2758 2759 2760 2761 2762 2763 2764 2765 2766 2767
  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
	}
    }

2768
  /* try_split returns the NOTE that INSN became.  */
2769
  SET_INSN_DELETED (insn);
2770

2771 2772 2773 2774 2775 2776
  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
2777
	{
2778 2779 2780 2781 2782
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
2783 2784
	}
    }
2785

2786
  return last;
2787
}
2788

2789
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */
2790 2791

void
2792
split_all_insns (void)
2793
{
2794
  sbitmap blocks;
2795
  bool changed;
2796
  basic_block bb;
2797

2798
  blocks = sbitmap_alloc (last_basic_block);
2799
  sbitmap_zero (blocks);
2800
  changed = false;
2801

2802
  FOR_EACH_BB_REVERSE (bb)
2803
    {
2804
      rtx insn, next;
2805
      bool finish = false;
2806

2807
      rtl_profile_for_bb (bb);
2808
      for (insn = BB_HEAD (bb); !finish ; insn = next)
2809
	{
2810 2811 2812
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
2813
	  finish = (insn == BB_END (bb));
2814
	  if (INSN_P (insn))
2815
	    {
2816 2817 2818 2819
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
2820
		 break the code that handles LIBCALL blocks.  */
2821 2822 2823 2824 2825 2826 2827 2828 2829 2830 2831 2832 2833 2834 2835 2836 2837 2838 2839
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		      delete_insn_and_edges (insn);
		}
	      else
		{
		  rtx last = split_insn (insn);
		  if (last)
		    {
		      /* The split sequence may include barrier, but the
			 BB boundary we are interested in will be set to
			 previous one.  */

2840
		      while (BARRIER_P (last))
2841 2842 2843 2844 2845
			last = PREV_INSN (last);
		      SET_BIT (blocks, bb->index);
		      changed = true;
		    }
		}
2846 2847 2848
	    }
	}
    }
2849

2850
  default_rtl_profile ();
2851
  if (changed)
2852
    find_many_sub_basic_blocks (blocks);
2853

2854 2855 2856
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
2857 2858

  sbitmap_free (blocks);
2859
}
2860

2861
/* Same as split_all_insns, but do not expect CFG to be available.
2862
   Used by machine dependent reorg passes.  */
2863

2864
unsigned int
2865
split_all_insns_noflow (void)
2866 2867 2868 2869 2870 2871
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
2872 2873 2874 2875
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
2876
	     break the code that handles LIBCALL blocks.  */
2877 2878 2879 2880 2881 2882 2883 2884 2885 2886 2887 2888 2889 2890 2891 2892
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
2893
    }
2894
  return 0;
2895
}
2896 2897

#ifdef HAVE_peephole2
2898 2899 2900 2901 2902 2903 2904 2905
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;
2906 2907
/* The number of instructions available to match a peep2.  */
int peep2_current_count;
2908 2909 2910

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
2911
   DF_LIVE_OUT for the block.  */
2912 2913 2914 2915 2916
#define PEEP2_EOB	pc_rtx

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */
2917

2918
rtx
2919
peep2_next_insn (int n)
2920
{
2921
  gcc_assert (n <= peep2_current_count);
2922 2923 2924 2925 2926 2927 2928 2929 2930 2931 2932 2933

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
2934
peep2_regno_dead_p (int ofs, int regno)
2935
{
2936
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2937 2938 2939 2940 2941

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

2942
  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2943 2944 2945 2946 2947 2948 2949

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
2950
peep2_reg_dead_p (int ofs, rtx reg)
2951 2952 2953
{
  int regno, n;

2954
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2955 2956 2957 2958 2959

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

2960
  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2961 2962

  regno = REGNO (reg);
2963
  n = hard_regno_nregs[regno][GET_MODE (reg)];
2964 2965 2966 2967 2968 2969 2970 2971 2972 2973 2974 2975 2976 2977 2978 2979 2980 2981
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
2982 2983
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
2984 2985
{
  static int search_ofs;
2986
  enum reg_class cl;
2987 2988 2989
  HARD_REG_SET live;
  int i;

2990 2991
  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2992 2993 2994 2995 2996 2997 2998 2999

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

3000
  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3001 3002 3003
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
3004
    {
3005 3006 3007 3008
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
3009
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3010 3011 3012 3013
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

3014
  cl = (class_str[0] == 'r' ? GENERAL_REGS
3015
	   : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3016 3017 3018 3019 3020 3021 3022 3023 3024 3025 3026 3027 3028 3029 3030 3031 3032 3033

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
3034 3035 3036
      /* Don't allocate global registers.  */
      if (global_regs[regno])
	continue;
3037
      /* Make sure the register is of the right class.  */
3038
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3039 3040 3041 3042 3043
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
3044
      if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3045
	continue;
3046 3047 3048
      if (! targetm.hard_regno_scratch_ok (regno))
	continue;

3049 3050 3051 3052 3053 3054
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      success = 1;
3055
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3056 3057 3058 3059 3060 3061 3062 3063 3064
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
3065
	{
3066
	  add_to_hard_reg_set (reg_set, mode, regno);
3067

3068 3069 3070 3071
	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;
3072

3073
	  return gen_rtx_REG (mode, regno);
3074
	}
3075 3076
    }

3077 3078
  search_ofs = 0;
  return NULL_RTX;
3079 3080
}

3081 3082 3083 3084 3085 3086 3087 3088 3089 3090 3091 3092 3093 3094 3095 3096 3097 3098 3099 3100
/* Forget all currently tracked instructions, only remember current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last holds invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL_RTX;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}

3101
/* Perform the peephole2 optimization pass.  */
3102

3103
static void
3104
peephole2_optimize (void)
3105
{
3106
  rtx insn, prev;
3107
  bitmap live;
3108 3109
  int i;
  basic_block bb;
3110
  bool do_cleanup_cfg = false;
3111
  bool do_rebuild_jump_labels = false;
3112

3113 3114 3115
  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();

3116 3117
  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3118 3119
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  live = BITMAP_ALLOC (&reg_obstack);
3120

3121
  FOR_EACH_BB_REVERSE (bb)
3122
    {
3123
      rtl_profile_for_bb (bb);
3124

3125
      /* Start up propagation.  */
3126
      bitmap_copy (live, DF_LR_OUT (bb));
3127
      df_simulate_initialize_backwards (bb, live);
3128
      peep2_reinit_state (live);
3129

3130
      for (insn = BB_END (bb); ; insn = prev)
3131 3132
	{
	  prev = PREV_INSN (insn);
3133
	  if (NONDEBUG_INSN_P (insn))
3134
	    {
3135
	      rtx attempt, before_try, x;
3136
	      int match_len;
3137
	      rtx note;
3138
	      bool was_call = false;
3139 3140 3141 3142

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
3143 3144
	      if (peep2_current_count < MAX_INSNS_PER_PEEP2
		  && peep2_insn_data[peep2_current].insn == NULL_RTX)
3145
		peep2_current_count++;
3146
	      peep2_insn_data[peep2_current].insn = insn;
3147
	      df_simulate_one_insn_backwards (bb, insn, live);
3148 3149
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

3150 3151 3152 3153 3154
	      if (RTX_FRAME_RELATED_P (insn))
		{
		  /* If an insn has RTX_FRAME_RELATED_P set, peephole
		     substitution would lose the
		     REG_FRAME_RELATED_EXPR that is attached.  */
3155
		  peep2_reinit_state (live);
3156
		  attempt = NULL;
3157 3158 3159
		}
	      else
		/* Match the peephole.  */
3160
		attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
3161

3162
	      if (attempt != NULL)
3163
		{
3164 3165 3166 3167 3168
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
		     cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
3169
		      int j;
3170 3171 3172 3173 3174 3175
		      rtx old_insn, new_insn, note;

		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
3176
		      if (!CALL_P (old_insn))
3177
			continue;
3178
		      was_call = true;
3179

3180
		      new_insn = attempt;
3181 3182
		      while (new_insn != NULL_RTX)
			{
3183
			  if (CALL_P (new_insn))
3184 3185 3186 3187
			    break;
			  new_insn = NEXT_INSN (new_insn);
			}

3188
		      gcc_assert (new_insn != NULL_RTX);
3189 3190 3191 3192 3193 3194 3195 3196 3197 3198 3199

		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_NORETURN:
			  case REG_SETJMP:
3200 3201 3202
			    add_reg_note (new_insn, REG_NOTE_KIND (note),
					  XEXP (note, 0));
			    break;
3203
			  default:
3204
			    /* Discard all other reg notes.  */
3205 3206 3207 3208 3209 3210 3211 3212 3213 3214
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
3215
			  gcc_assert (!CALL_P (old_insn));
3216 3217 3218 3219
			}
		      break;
		    }

3220 3221 3222 3223
		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

3224
		  note = find_reg_note (peep2_insn_data[i].insn,
3225 3226
					REG_EH_REGION, NULL_RTX);

3227
		  /* Replace the old sequence with the new.  */
3228 3229 3230
		  attempt = emit_insn_after_setloc (attempt,
						    peep2_insn_data[i].insn,
				       INSN_LOCATOR (peep2_insn_data[i].insn));
3231
		  before_try = PREV_INSN (insn);
3232
		  delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3233

3234
		  /* Re-insert the EH_REGION notes.  */
3235
		  if (note || (was_call && nonlocal_goto_handler_labels))
3236
		    {
3237
		      edge eh_edge;
3238
		      edge_iterator ei;
3239

3240
		      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3241
			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3242 3243
			  break;

3244 3245 3246 3247 3248 3249 3250 3251 3252 3253 3254 3255 3256 3257 3258 3259 3260 3261 3262 3263 3264 3265 3266 3267 3268 3269 3270 3271 3272
		      if (note)
			copy_reg_eh_region_note_backward (note, attempt,
							  before_try);

		      if (eh_edge)
			for (x = attempt ; x != before_try ; x = PREV_INSN (x))
			  if (x != BB_END (bb)
			      && (can_throw_internal (x)
				  || can_nonlocal_goto (x)))
			    {
			      edge nfte, nehe;
			      int flags;

			      nfte = split_block (bb, x);
			      flags = (eh_edge->flags
				       & (EDGE_EH | EDGE_ABNORMAL));
			      if (CALL_P (x))
				flags |= EDGE_ABNORMAL_CALL;
			      nehe = make_edge (nfte->src, eh_edge->dest,
						flags);

			      nehe->probability = eh_edge->probability;
			      nfte->probability
				= REG_BR_PROB_BASE - nehe->probability;

			      do_cleanup_cfg |= purge_dead_edges (nfte->dest);
			      bb = nfte->src;
			      eh_edge = nehe;
			    }
3273 3274 3275 3276

		      /* Converting possibly trapping insn to non-trapping is
			 possible.  Zap dummy outgoing edges.  */
		      do_cleanup_cfg |= purge_dead_edges (bb);
3277 3278
		    }

3279
		  if (targetm.have_conditional_execution ())
3280
		    {
3281 3282 3283 3284 3285 3286 3287 3288 3289 3290 3291 3292 3293 3294 3295 3296
		      for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
			peep2_insn_data[i].insn = NULL_RTX;
		      peep2_insn_data[peep2_current].insn = PEEP2_EOB;
		      peep2_current_count = 0;
		    }
		  else
		    {
		      /* Back up lifetime information past the end of the
			 newly created sequence.  */
		      if (++i >= MAX_INSNS_PER_PEEP2 + 1)
			i = 0;
		      bitmap_copy (live, peep2_insn_data[i].live_before);

		      /* Update life information for the new sequence.  */
		      x = attempt;
		      do
3297
			{
3298 3299 3300 3301 3302 3303 3304 3305 3306 3307 3308 3309 3310 3311
			  if (INSN_P (x))
			    {
			      if (--i < 0)
				i = MAX_INSNS_PER_PEEP2;
			      if (peep2_current_count < MAX_INSNS_PER_PEEP2
				  && peep2_insn_data[i].insn == NULL_RTX)
				peep2_current_count++;
			      peep2_insn_data[i].insn = x;
			      df_insn_rescan (x);
			      df_simulate_one_insn_backwards (bb, x, live);
			      bitmap_copy (peep2_insn_data[i].live_before,
					   live);
			    }
			  x = PREV_INSN (x);
3312
			}
3313
		      while (x != prev);
3314

3315 3316
		      peep2_current = i;
		    }
3317 3318 3319

		  /* If we generated a jump instruction, it won't have
		     JUMP_LABEL set.  Recompute after we're done.  */
3320
		  for (x = attempt; x != before_try; x = PREV_INSN (x))
3321
		    if (JUMP_P (x))
3322 3323 3324 3325
		      {
		        do_rebuild_jump_labels = true;
			break;
		      }
3326
		}
3327
	    }
3328

3329
	  if (insn == BB_HEAD (bb))
3330
	    break;
3331 3332 3333
	}
    }

3334
  default_rtl_profile ();
3335
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3336 3337
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
3338 3339
  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
3340 3341
}
#endif /* HAVE_peephole2 */
3342 3343 3344 3345

/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
3346 3347
   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */
3348 3349

int
3350
store_data_bypass_p (rtx out_insn, rtx in_insn)
3351 3352
{
  rtx out_set, in_set;
3353 3354 3355
  rtx out_pat, in_pat;
  rtx out_exp, in_exp;
  int i, j;
3356 3357

  in_set = single_set (in_insn);
3358
  if (in_set)
3359
    {
3360
      if (!MEM_P (SET_DEST (in_set)))
3361
	return false;
3362 3363 3364 3365 3366 3367 3368 3369 3370 3371 3372 3373 3374 3375 3376 3377 3378 3379 3380 3381 3382 3383 3384 3385 3386 3387 3388

      out_set = single_set (out_insn);
      if (out_set)
        {
          if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
            return false;
        }
      else
        {
          out_pat = PATTERN (out_insn);

	  if (GET_CODE (out_pat) != PARALLEL)
	    return false;

          for (i = 0; i < XVECLEN (out_pat, 0); i++)
            {
              out_exp = XVECEXP (out_pat, 0, i);

              if (GET_CODE (out_exp) == CLOBBER)
                continue;

              gcc_assert (GET_CODE (out_exp) == SET);

              if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
                return false;
            }
        }
    }
  else
    {
      in_pat = PATTERN (in_insn);
      gcc_assert (GET_CODE (in_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (in_pat, 0); i++)
	{
	  in_exp = XVECEXP (in_pat, 0, i);

	  if (GET_CODE (in_exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (in_exp) == SET);

	  if (!MEM_P (SET_DEST (in_exp)))
	    return false;

          out_set = single_set (out_insn);
          if (out_set)
            {
              if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
                return false;
            }
          else
            {
              out_pat = PATTERN (out_insn);
              gcc_assert (GET_CODE (out_pat) == PARALLEL);

              for (j = 0; j < XVECLEN (out_pat, 0); j++)
                {
                  out_exp = XVECEXP (out_pat, 0, j);

                  if (GET_CODE (out_exp) == CLOBBER)
                    continue;

                  gcc_assert (GET_CODE (out_exp) == SET);

                  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
                    return false;
                }
            }
        }
    }

  return true;
}

/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not in the THEN or ELSE branch.  OUT_INSN may be either a
   single set or multiple sets; for the result to be meaningful IN_INSN should
   be a single_set, but for convenience of insn categorization it may be any
   JUMP or CALL insn.  */
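
/* Illustration (hypothetical RTL): if OUT_INSN sets (reg:CC 17) and
   IN_INSN is
     (set (pc) (if_then_else (ne (reg:CC 17) (const_int 0))
			     (label_ref 42) (pc)))
   then reg 17 feeds only the condition, so the function returns
   nonzero.  If OUT_INSN instead set a register appearing in the THEN
   or ELSE arm, the result would be zero.  */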

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}

static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

struct rtl_opt_pass pass_peephole2 =
{
 {
  RTL_PASS,
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                       /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_all_insns =
{
 {
  RTL_PASS,
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_after_reload =
{
 {
  RTL_PASS,
  "split2",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_after_reload,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions that need splitting, and
     scheduling after reload is not done, they might not be split
     until final, which does not allow splitting when
     HAVE_ATTR_length is defined.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_before_regstack =
{
 {
  RTL_PASS,
  "split3",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_before_regstack, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

struct rtl_opt_pass pass_split_before_sched2 =
{
 {
  RTL_PASS,
  "split4",                             /* name */
  gate_handle_split_before_sched2,      /* gate */
  rest_of_handle_split_before_sched2,   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow |
  TODO_dump_func                        /* todo_flags_finish */
 }
};

/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct rtl_opt_pass pass_split_for_shorten_branches =
{
 {
  RTL_PASS,
  "split5",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};