/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "tree-pass.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

struct change_t
{
  rtx object;
  int old_code;
  bool unshare;
  rtx *loc;
  rtx old;
};

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting it to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting it to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, any clobbers that have to be added to make the
   insn match are added to the current change group.  Otherwise the
   changes take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
                      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
	       REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (changes[i].old))
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the changed insns and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

526

Kazu Hirata committed
527
/* Return the number of changes so far in the current group.  */
Richard Kenner committed
528 529

int
530
num_validated_changes (void)
Richard Kenner committed
531 532 533 534 535 536 537
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
538
cancel_changes (int num)
Richard Kenner committed
539 540 541 542 543 544 545
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
546
      *changes[i].loc = changes[i].old;
547
      if (changes[i].object && !MEM_P (changes[i].object))
548
	INSN_CODE (changes[i].object) = changes[i].old_code;
Richard Kenner committed
549 550 551 552
    }
  num_changes = num;
}

553
/* Reduce conditional compilation elsewhere.  */
554 555
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */
Richard Kenner committed
556 557

static void
558
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
559
                          machine_mode op0_mode)
Richard Kenner committed
560
{
561
  rtx x = *loc;
562
  enum rtx_code code = GET_CODE (x);
563
  rtx new_rtx = NULL_RTX;
564
  scalar_int_mode is_mode;
Richard Kenner committed
565

566
  if (SWAPPABLE_OPERANDS_P (x)
567 568
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
569 570 571 572 573
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
574 575 576
      x = *loc;
      code = GET_CODE (x);
    }
577

578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606
  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

Richard Kenner committed
607 608 609
  switch (code)
    {
    case PLUS:
Jeff Law committed
610
      /* If we have a PLUS whose second operand is now a CONST_INT, use
611
         simplify_gen_binary to try to simplify it.
612 613
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
Shujing Zhao committed
614
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
615
	validate_change (object, loc,
616 617
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
618
      break;
619
    case MINUS:
620
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
621 622 623 624
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
625 626
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
627
      break;
Richard Kenner committed
628 629
    case ZERO_EXTEND:
    case SIGN_EXTEND:
630
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
Richard Kenner committed
631
	{
632
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
633
				    op0_mode);
634 635
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
636 637 638
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
Richard Kenner committed
639 640 641
	}
      break;
    case SUBREG:
642
      /* All subregs possible to simplify should be simplified.  */
643
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
644 645
			     SUBREG_BYTE (x));

646
      /* Subregs of VOIDmode operands are incorrect.  */
647 648 649 650
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
Richard Kenner committed
651 652 653 654
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
655 656 657
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */
Richard Kenner committed
658

659
      if (MEM_P (XEXP (x, 0))
660
	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
Shujing Zhao committed
661 662
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
663 664
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
665
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
Richard Kenner committed
666 667
	{
	  int pos = INTVAL (XEXP (x, 2));
668
	  machine_mode new_mode = is_mode;
669
	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
670
	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
671
	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
672 673 674 675
	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
					 ? word_mode
					 : as_a <scalar_int_mode> (new_mode));
Richard Kenner committed
676

677
	  /* If we have a narrower mode, we can do something.  */
678
	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
Richard Kenner committed
679 680 681 682
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

683
	      /* If the bytes and bits are counted differently, we
684
	         must adjust the offset.  */
685
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
686 687 688
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);
Richard Kenner committed
689

690 691
	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
Richard Kenner committed
692 693
	      pos %= GET_MODE_BITSIZE (wanted_mode);

Richard Kenner committed
694
	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
Richard Kenner committed
695

Charles Hannum committed
696
	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
Richard Kenner committed
697 698 699 700 701
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;
702

703 704
    default:
      break;
Richard Kenner committed
705 706 707
    }
}

708 709 710 711
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
712
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
713 714 715 716 717 718
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
719
  machine_mode op0_mode = VOIDmode;
720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
748
     so we must special case shared ASM_OPERANDS.  */
749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
H.J. Lu committed
765
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
766 767 768 769 770 771 772 773 774 775
                                    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
H.J. Lu committed
776
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
777 778 779 780 781 782 783
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

784
  /* ??? The regmove is no more, so is this aberration still necessary?  */
785 786 787 788 789 790 791 792 793 794 795
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

796 797 798 799 800
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
801
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
802 803 804 805 806
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

Richard Kenner committed
807 808 809 810
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
811
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
Richard Kenner committed
812
{
813
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
Richard Kenner committed
814 815
  return apply_change_group ();
}
816

817
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
H.J. Lu committed
818 819 820
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
821 822 823
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
824
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
825 826 827 828 829 830
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
H.J. Lu committed
831 832
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
833
				      rtx_insn *insn)
834 835 836 837 838 839
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();

}

840 841
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */
842 843

void
844
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
845
{
846
  rtx note;
847
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
848 849 850 851
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
852 853
}

854 855
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
856 857 858
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
859
  rtx_insn *insn;			/* Insn in which substitution is occurring.  */
860
};
861 862

static void
863
validate_replace_src_1 (rtx *x, void *data)
864 865 866 867
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

868
  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
869 870
}

871
/* Try replacing every occurrence of FROM in INSN with TO, avoiding
872
   SET_DESTs.  */
873

874
void
875
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
876
{
877
  struct validate_replace_src_data d;
878

879 880 881 882
  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
883
}
Razya Ladelsky committed
884 885 886 887 888 889

/* Try simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
890
validate_simplify_insn (rtx_insn *insn)
Razya Ladelsky committed
891 892 893 894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
Richard Kenner committed
924 925 926 927 928 929

/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
930
next_insn_tests_no_inequality (rtx_insn *insn)
Richard Kenner committed
931
{
932
  rtx_insn *next = next_cc0_user (insn);
Richard Kenner committed
933 934 935 936 937

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

938
  return (INSN_P (next)
Richard Kenner committed
939 940 941 942 943 944 945 946 947 948 949 950 951 952 953
	  && ! inequality_comparisons_p (PATTERN (next)));
}

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && paradoxical_subreg_p (op))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed
	  && maybe_ne (SUBREG_BYTE (op), 0)
	  && MEM_P (sub))
	return 0;

      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && paradoxical_subreg_p (op))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insns with invalid addresses, which are made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
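
/* In a machine description, this predicate and the more specialized ones
   below typically appear in match_operand expressions, e.g.
   (match_operand:SI 1 "general_operand" "g").  */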

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
	return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && (lra_in_progress
		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op)
					    : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
      int prec = GET_MODE_PRECISION (int_mode);
      int bitsize = GET_MODE_BITSIZE (int_mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
	  || (STACK_GROWS_DOWNWARD
	      ? maybe_ne (offset, -rounded_size)
	      : maybe_ne (offset, rounded_size)))
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
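
/* With the STACK_POP_CODE default chosen at the top of this file, a pop on
   a STACK_GROWS_DOWNWARD target looks like (mem:SI (post_inc (reg sp)))
   (SImode here is only an example), which is the shape accepted above.  */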

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */
      poly_int64 offset;
      rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
      return (known_eq (offset + SUBREG_BYTE (op), 0)
	      && general_operand (addr, Pmode));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}
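
/* The body of an asm with one output, e.g. asm ("..." : "=r" (x)), is
   (set (reg x) (asm_operands ...)); with several outputs it is a PARALLEL
   of such SETs, possibly followed by clobbers, and with no outputs the
   ASM_OPERANDS appears directly.  Those are the shapes unwrapped above and
   counted by asm_noperands below.  */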

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return 0.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	{
	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	  return 0;
	}
      return -1;
    }

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
	         then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Write the location info into LOC.
   Return the assembler-template.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return the basic assembly string.

   If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, machine_mode *modes,
		     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	else if (GET_CODE (asmop) == ASM_INPUT)
	  {
	    if (loc)
	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
	    return XSTR (asmop, 0);
	  }
	break;
      }
    default:
      gcc_unreachable ();
    }
  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;
  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}

/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
			 unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
	p += 1;
	/* A letter followed by a digit indicates an operand number.  */
	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
	  p += 1;
	if (ISDIGIT (*p))
	  {
	    char *endptr;
	    unsigned long opnum = strtoul (p, &endptr, 10);
	    if (endptr != p && opnum < noperands)
	      used[opnum] = true;
	    p = endptr;
	  }
	else
	  p += 1;
	break;

      default:
	p++;
	break;
      }
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
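
/* For instance, a matching-digit constraint such as "0" can only be checked
   conclusively when the CONSTRAINTS array for the other operands is
   supplied; without it this function reports the inconclusive (< 0) result
   rather than rejecting the operand.  */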

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;
	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.
	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	case '<':
	case '>':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.
	     Match any memory and hope things are resolved after reload.  */
	  incdec_ok = true;
	  /* FALLTHRU */
	default:
	  cn = lookup_constraint (constraint);
	  switch (get_constraint_type (cn))
	    {
	    case CT_REGISTER:
	      if (!result
		  && reg_class_for_constraint (cn) != NO_REGS
		  && GET_MODE (op) != BLKmode
		  && register_operand (op, VOIDmode))
		result = 1;
	      break;

	    case CT_CONST_INT:
	      if (!result
		  && CONST_INT_P (op)
		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
		result = 1;
	      break;

	    case CT_MEMORY:
	    case CT_SPECIAL_MEMORY:
	      /* Every memory operand can be reloaded to fit.  */
	      result = result || memory_operand (op, VOIDmode);
	      break;

	    case CT_ADDRESS:
	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);
	      break;

	    case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
	      break;
1822
	    }
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint && *constraint != ',');
      if (len)
	return 0;
    }

  /* For operands without < or > constraints reject side-effects.  */
  if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */
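
/* A rough example of the idea: an SImode MEM whose address is
   (plus (reg) (const_int 8)) is offsettable only if the target also accepts
   the address with the maximum in-object offset (size - 1) added,
   i.e. (plus (reg) (const_int 11)), checked in QImode.  */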

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
	     : memory_address_addr_space_p);
  poly_int64 mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (known_eq (mode_sz, 0))
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */
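
/* For example, (post_inc (reg)) advances the register by the size of the
   access, so touching the same address in QImode and in SImode has
   different side effects; such forms are therefore always reported as
   mode-dependent below, while {pre,post}_modify and everything else is
   left to the target hook.  */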

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}

/* Return true if boolean attribute ATTR is supported.  */

static bool
have_bool_attr (bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return HAVE_ATTR_enabled;
    case BA_PREFERRED_FOR_SIZE:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
    case BA_PREFERRED_FOR_SPEED:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
    }
  gcc_unreachable ();
}

/* Return the value of ATTR for instruction INSN.  */

static bool
get_bool_attr (rtx_insn *insn, bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return get_attr_enabled (insn);
    case BA_PREFERRED_FOR_SIZE:
      return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
    case BA_PREFERRED_FOR_SPEED:
      return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
    }
  gcc_unreachable ();
}

/* Like get_bool_attr_mask, but don't use the cache.  */

static alternative_mask
get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
{
  /* Temporarily install enough information for get_attr_<foo> to assume
     that the insn operands are already cached.  As above, the attribute
     mustn't depend on the values of operands, so we don't provide their
     real values here.  */
  rtx_insn *old_insn = recog_data.insn;
  int old_alternative = which_alternative;

  recog_data.insn = insn;
  alternative_mask mask = ALL_ALTERNATIVES;
  int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
  for (int i = 0; i < n_alternatives; i++)
    {
      which_alternative = i;
      if (!get_bool_attr (insn, attr))
	mask &= ~ALTERNATIVE_BIT (i);
    }

  recog_data.insn = old_insn;
  which_alternative = old_alternative;
  return mask;
}

/* Return the mask of operand alternatives that are allowed for INSN
   by boolean attribute ATTR.  This mask depends only on INSN and on
   the current target; it does not depend on things like the values of
   operands.  */

static alternative_mask
get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
{
  /* Quick exit for asms and for targets that don't use these attributes.  */
  int code = INSN_CODE (insn);
  if (code < 0 || !have_bool_attr (attr))
    return ALL_ALTERNATIVES;
  /* Calling get_attr_<foo> can be expensive, so cache the mask
     for speed.  */
  if (!this_target_recog->x_bool_attr_masks[code][attr])
    this_target_recog->x_bool_attr_masks[code][attr]
      = get_bool_attr_mask_uncached (insn, attr);
  return this_target_recog->x_bool_attr_masks[code][attr];
}

/* Return the set of alternatives of INSN that are allowed by the current
   target.  */

alternative_mask
get_enabled_alternatives (rtx_insn *insn)
{
  return get_bool_attr_mask (insn, BA_ENABLED);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the current size/speed optimization
   choice.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn)
{
  if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the size/speed optimization choice
   associated with BB.  Passing a separate BB is useful if INSN has not
   been emitted yet or if we are considering moving it to a different
   block.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn, basic_block bb)
{
  if (optimize_bb_for_speed_p (bb))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}

/* Assert that the cached boolean attributes for INSN are still accurate.
   The backend is required to define these attributes in a way that only
   depends on the current target (rather than operands, compiler phase,
   etc.).  */

bool
check_bool_attrs (rtx_insn *insn)
{
  int code = INSN_CODE (insn);
  if (code >= 0)
    for (int i = 0; i <= BA_LAST; ++i)
      {
	enum bool_attr attr = (enum bool_attr) i;
	if (this_target_recog->x_bool_attr_masks[code][attr])
	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
		      == get_bool_attr_mask_uncached (insn, attr));
      }
  return true;
}

/* Like extract_insn, but save the extracted insn and don't extract it again
   when called for the same insn, expecting that recog_data still contains
   the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx_insn *insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do uncached extract_insn, constrain_operands and complain about failures.
   This should be used when extracting a pre-existing constrained instruction
   if the caller wants to know which alternative was chosen.  */
void
extract_constrain_insn (rtx_insn *insn)
{
  extract_insn (insn);
  if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx_insn *insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed,
			      get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands on INSN and complain about failures.  */
int
constrain_operands_cached (rtx_insn *insn, int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict, get_enabled_alternatives (insn));
  else
    return 1;
}
/* Analyze INSN and fill in recog_data.  */
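
/* Illustrative note: after this runs, recog_data.operand[] holds the
   operands and recog_data.constraints[] their constraint strings; each
   operand is classified from the first constraint character as OP_OUT
   ('='), OP_INOUT ('+') or OP_IN, and which_alternative is reset to -1
   until constrain_operands picks an alternative.  */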

void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
2252
    case VAR_LOCATION:
2253
    case DEBUG_MARKER:
2254 2255 2256
      return;

    case SET:
2257 2258 2259 2260
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
2261
    case PARALLEL:
2262 2263
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2264 2265
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2266 2267 2268
	goto asm_insn;
      else
	goto normal_insn;
2269
    case ASM_OPERANDS:
2270
    asm_insn:
2271
      recog_data.n_operands = noperands = asm_noperands (body);
2272 2273 2274 2275 2276
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
2277
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2278 2279

	  /* Now get the operand values and constraints out of the insn.  */
2280 2281 2282
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
2283
			       recog_data.operand_mode, NULL);
2284
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2285 2286
	  if (noperands > 0)
	    {
2287 2288
	      const char *p =  recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
2289
	      while (*p)
2290
		recog_data.n_alternatives += (*p++ == ',');
2291
	    }
2292
	  recog_data.is_asm = true;
2293 2294
	  break;
	}
2295
      fatal_insn_not_found (insn);
2296 2297

    default:
2298
    normal_insn:
2299 2300 2301 2302 2303 2304 2305
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

2306 2307 2308
      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;
2309 2310 2311 2312 2313

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
2314
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2315
	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2316
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2317 2318 2319
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2320 2321
	}
    }
2322
  for (i = 0; i < noperands; i++)
2323 2324 2325 2326
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);
2327

2328
  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2329 2330 2331

  recog_data.insn = NULL;
  which_alternative = -1;
2332 2333
}

2334 2335 2336 2337 2338 2339 2340 2341
/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
   operands, N_ALTERNATIVES alternatives and constraint strings
   CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
   and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
   if the insn is an asm statement and preprocessing should take the
   asm operands into account, e.g. to determine whether they could be
   addresses in constraints that require addresses; it should then
   point to an array of pointers to each operand.  */
2342

2343
void
2344 2345
preprocess_constraints (int n_operands, int n_alternatives,
			const char **constraints,
2346 2347
			operand_alternative *op_alt_base,
			rtx **oploc)
2348
{
2349
  for (int i = 0; i < n_operands; i++)
2350 2351 2352
    {
      int j;
      struct operand_alternative *op_alt;
2353
      const char *p = constraints[i];
2354

2355
      op_alt = op_alt_base;
2356

2357
      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2358
	{
2359 2360 2361 2362
	  op_alt[i].cl = NO_REGS;
	  op_alt[i].constraint = p;
	  op_alt[i].matches = -1;
	  op_alt[i].matched = -1;
2363 2364 2365

	  if (*p == '\0' || *p == ',')
	    {
2366
	      op_alt[i].anything_ok = 1;
2367 2368 2369 2370 2371
	      continue;
	    }

	  for (;;)
	    {
2372
	      char c = *p;
2373 2374
	      if (c == '#')
		do
2375
		  c = *++p;
2376 2377
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
2378 2379 2380 2381
		{
		  p++;
		  break;
		}
2382 2383 2384 2385

	      switch (c)
		{
		case '?':
2386
		  op_alt[i].reject += 6;
2387 2388
		  break;
		case '!':
2389
		  op_alt[i].reject += 600;
2390 2391
		  break;
		case '&':
2392
		  op_alt[i].earlyclobber = 1;
2393
		  break;
2394 2395 2396

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
2397 2398
		  {
		    char *end;
2399 2400
		    op_alt[i].matches = strtoul (p, &end, 10);
		    op_alt[op_alt[i].matches].matched = i;
2401 2402
		    p = end;
		  }
2403
		  continue;
2404 2405

		case 'X':
2406
		  op_alt[i].anything_ok = 1;
2407 2408
		  break;

2409
		case 'g':
2410 2411
		  op_alt[i].cl =
		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2412 2413 2414
		  break;

		default:
2415 2416 2417
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl;
		  switch (get_constraint_type (cn))
2418
		    {
2419 2420 2421 2422 2423 2424
		    case CT_REGISTER:
		      cl = reg_class_for_constraint (cn);
		      if (cl != NO_REGS)
			op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
		      break;

2425 2426 2427
		    case CT_CONST_INT:
		      break;

2428
		    case CT_MEMORY:
2429
		    case CT_SPECIAL_MEMORY:
2430
		      op_alt[i].memory_ok = 1;
2431
		      break;
2432 2433

		    case CT_ADDRESS:
2434 2435 2436
		      if (oploc && !address_operand (*oploc[i], VOIDmode))
			break;

2437 2438
		      op_alt[i].is_address = 1;
		      op_alt[i].cl
2439
			= (reg_class_subunion
2440
			   [(int) op_alt[i].cl]
2441 2442
			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
						  ADDRESS, SCRATCH)]);
2443 2444
		      break;

2445 2446 2447
		    case CT_FIXED_FORM:
		      break;
		    }
2448 2449
		  break;
		}
2450
	      p += CONSTRAINT_LEN (c, p);
2451 2452 2453 2454
	    }
	}
    }
}
2455

2456 2457 2458 2459
/* Return an array of operand_alternative instructions for
   instruction ICODE.  */

const operand_alternative *
2460
preprocess_insn_constraints (unsigned int icode)
2461
{
2462
  gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2463 2464 2465 2466 2467 2468 2469 2470 2471 2472 2473 2474 2475 2476 2477 2478 2479 2480
  if (this_target_recog->x_op_alt[icode])
    return this_target_recog->x_op_alt[icode];

  int n_operands = insn_data[icode].n_operands;
  if (n_operands == 0)
    return 0;
  /* Always provide at least one alternative so that which_op_alt ()
     works correctly.  If the instruction has 0 alternatives (i.e. all
     constraint strings are empty) then each operand in this alternative
     will have anything_ok set.  */
  int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
  int n_entries = n_operands * n_alternatives;

  operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
  const char **constraints = XALLOCAVEC (const char *, n_operands);

  for (int i = 0; i < n_operands; ++i)
    constraints[i] = insn_data[icode].operand[i].constraint;
2481 2482
  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
			  NULL);
2483 2484 2485 2486 2487 2488 2489 2490 2491 2492

  this_target_recog->x_op_alt[icode] = op_alt;
  return op_alt;
}

/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
2493
preprocess_constraints (rtx_insn *insn)
2494 2495 2496 2497 2498 2499 2500 2501 2502 2503 2504
{
  int icode = INSN_CODE (insn);
  if (icode >= 0)
    recog_op_alt = preprocess_insn_constraints (icode);
  else
    {
      int n_operands = recog_data.n_operands;
      int n_alternatives = recog_data.n_alternatives;
      int n_entries = n_operands * n_alternatives;
      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
      preprocess_constraints (n_operands, n_alternatives,
2505 2506
			      recog_data.constraints, asm_op_alt,
			      NULL);
2507 2508 2509 2510
      recog_op_alt = asm_op_alt;
    }
}

/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they match any of the alternatives in ALTERNATIVES.

2514 2515
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.
Richard Kenner committed
2516 2517 2518 2519 2520

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

2521
   In addition, when two operands are required to match
Richard Kenner committed
2522 2523 2524 2525 2526 2527 2528 2529
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

2530
   If STRICT is a positive nonzero value, it means that we have been
Richard Kenner committed
2531 2532 2533 2534 2535 2536 2537 2538 2539 2540
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
2541
  int this_op, other;
Richard Kenner committed
2542 2543 2544
};

int
2545
constrain_operands (int strict, alternative_mask alternatives)
Richard Kenner committed
2546
{
2547
  const char *constraints[MAX_RECOG_OPERANDS];
2548 2549
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
2550
  int c;
Richard Kenner committed
2551 2552 2553 2554

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

2555
  which_alternative = 0;
2556
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
Richard Kenner committed
2557 2558
    return 1;

2559
  for (c = 0; c < recog_data.n_operands; c++)
2560
    {
2561
      constraints[c] = recog_data.constraints[c];
2562 2563
      matching_operands[c] = -1;
    }
Richard Kenner committed
2564

2565
  do
Richard Kenner committed
2566
    {
2567
      int seen_earlyclobber_at = -1;
2568
      int opno;
Richard Kenner committed
2569 2570 2571
      int lose = 0;
      funny_match_index = 0;

2572
      if (!TEST_BIT (alternatives, which_alternative))
2573 2574 2575 2576 2577 2578 2579 2580 2581 2582
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

2583
      for (opno = 0; opno < recog_data.n_operands; opno++)
Richard Kenner committed
2584
	{
2585
	  rtx op = recog_data.operand[opno];
2586
	  machine_mode mode = GET_MODE (op);
2587
	  const char *p = constraints[opno];
Richard Kenner committed
2588 2589 2590
	  int offset = 0;
	  int win = 0;
	  int val;
2591
	  int len;
Richard Kenner committed
2592

2593 2594
	  earlyclobber[opno] = 0;

2595
	  /* A unary operator may be accepted by the predicate, but it
2596
	     is irrelevant for matching constraints.  */
2597
	  if (UNARY_P (op))
2598 2599
	    op = XEXP (op, 0);

Richard Kenner committed
2600 2601
	  if (GET_CODE (op) == SUBREG)
	    {
2602
	      if (REG_P (SUBREG_REG (op))
Richard Kenner committed
2603
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2604 2605 2606 2607
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
Richard Kenner committed
2608 2609 2610 2611 2612 2613 2614 2615
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

2616 2617
	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
Richard Kenner committed
2618
	      {
2619 2620 2621 2622 2623 2624 2625
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

2626 2627 2628
	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
2629
		do
2630
		  p++;
2631 2632
		while (*p && *p != ',');
		len = 0;
2633 2634
		break;

2635 2636
	      case '&':
		earlyclobber[opno] = 1;
2637 2638
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
2639 2640
		break;

Jeff Law committed
2641 2642
	      case '0':  case '1':  case '2':  case '3':  case '4':
	      case '5':  case '6':  case '7':  case '8':  case '9':
2643 2644 2645 2646 2647 2648 2649 2650 2651 2652 2653 2654 2655
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this_op = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      default:
		{
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl = reg_class_for_constraint (cn);
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
		        win = 1;
		    }

		  else if (constraint_satisfied_p (op, cn))
		    win = 1;

		  else if (insn_extra_memory_constraint (cn)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into a mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* Before reload, accept a pseudo,
				  since LRA can turn it into a mem.  */
			       || (strict < 0 && targetm.lra_p () && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (insn_extra_address_constraint (cn)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
		  /* Cater to architectures like IA-64 that define extra memory
		     constraints without using define_memory_constraint.  */
		  else if (reload_in_progress
			   && REG_P (op)
			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
			   && reg_renumber[REGNO (op)] < 0
			   && reg_equiv_mem (REGNO (op)) != 0
			   && constraint_satisfied_p
			      (reg_equiv_mem (REGNO (op)), cn))
		    win = 1;
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0  && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this_op];
		}

	      /* For operands without < or > constraints reject side-effects.  */
	      if (AUTO_INC_DEC && recog_data.is_asm)
		{
		  for (opno = 0; opno < recog_data.n_operands; opno++)
		    if (MEM_P (recog_data.operand[opno]))
		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
			{
			case PRE_INC:
			case POST_INC:
			case PRE_DEC:
			case POST_DEC:
			case PRE_MODIFY:
			case POST_MODIFY:
			  if (strchr (recog_data.constraints[opno], '<') == NULL
			      && strchr (recog_data.constraints[opno], '>')
				 == NULL)
			    return 0;
			  break;
			default:
			  break;
			}
		}
	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1, alternatives);
  else
    return 0;
}

/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
		  machine_mode mode)
{
  unsigned int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  /* Regno must not be a pseudo register.  Offset may be negative.  */
  return (HARD_REGISTER_NUM_P (regno)
	  && HARD_REGISTER_NUM_P (regno + offset)
	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode, 
				regno + offset));
}

/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */
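
/* Illustrative note (not a specification): the actual splitting is done by
   try_split, which applies the machine description's split patterns; on
   success the original insn is reduced to a deleted note and the last insn
   of the replacement sequence is returned here.  */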

static rtx_insn *
split_insn (rtx_insn *insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx_insn *first = PREV_INSN (insn);
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL,
				 copy_rtx (SET_SRC (insn_set)));
	}
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }

  return last;
}

/* Split all insns in the function.  */

void
split_all_insns (void)
{
  bool changed;
  bool need_cfg_cleanup = false;
  basic_block bb;

  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));

	  /* If INSN has a REG_EH_REGION note and we split INSN, the
	     resulting split may not have/need REG_EH_REGION notes.

	     If that happens and INSN was the last reference to the
	     given EH region, then the EH region will become unreachable.
	     We can not leave the unreachable blocks in the CFG as that
	     will trigger a checking failure.

	     So track if INSN has a REG_EH_REGION note.  If so and we
	     split INSN, then trigger a CFG cleanup.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		      delete_insn_and_edges (insn);
		  if (note)
		    need_cfg_cleanup = true;
		}
	      else
		{
		  if (split_insn (insn))
		    {
		      bitmap_set_bit (blocks, bb->index);
		      changed = true;
		      if (note)
			need_cfg_cleanup = true;
		    }
		}
	    }
	}
    }
  default_rtl_profile ();
  if (changed)
    {
      find_many_sub_basic_blocks (blocks);

      /* Splitting could drop an REG_EH_REGION if it potentially
	 trapped in its original form, but does not in its split
	 form.  Consider a FLOAT_TRUNCATE which splits into a memory
	 store/load pair and -fnon-call-exceptions.  */
      if (need_cfg_cleanup)
	cleanup_cfg (0);
    }
  checking_verify_flow_info ();
}

/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx_insn *next, *insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles LIBCALL blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
  return 0;
}

struct peep2_insn_data
{
  rtx_insn *insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;
/* A marker indicating the last insn of the block.  The live_before regset
   for this element is correct, indicating DF_LIVE_OUT for the block.  */
#define PEEP2_EOB invalid_insn_rtx
/* Wrap N to fit into the peep2_insn_data buffer.  */

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}
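
/* Illustrative only: with a buffer of MAX_INSNS_PER_PEEP2 + 1 slots, an
   index of MAX_INSNS_PER_PEEP2 + 2 wraps to 1; callers only ever pass
   values below twice the buffer length, so a single subtraction is
   sufficient.  */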

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */
rtx_insn *
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
  unsigned int end_regno = END_REGNO (reg);
  for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
      return 0;
  return 1;
}

/* Regno offset to be used in the register search.  */
static int search_ofs;

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */
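
/* For example, a define_peephole2 that needs a scratch register would
   typically reach this with a register-class constraint string such as "r",
   FROM and TO being offsets into the current peephole window; passing the
   same REG_SET across calls (an assumption about the caller, not enforced
   here) keeps two scratch registers from overlapping.  */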

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  machine_mode mode, HARD_REG_SET *reg_set)
{
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref def;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);
  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn.  */
      FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
      from = peep2_buf_position (from + 1);
    }

  cl = reg_class_for_constraint (lookup_constraint (class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Can it support the mode we need?  */
      if (!targetm.hard_regno_mode_ok (regno, mode))
	continue;

      success = 1;
      for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
	{
	  /* Don't allocate fixed registers.  */
	  if (fixed_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Don't allocate global registers.  */
	  if (global_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Make sure the register is of the right class.  */
	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
	    {
	      success = 0;
	      break;
	    }
	  /* And that we don't create an extra save/restore.  */
	  if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  if (! targetm.hard_regno_scratch_ok (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  /* And we don't clobber traceback for noreturn functions.  */
	  if ((regno + j == FRAME_POINTER_REGNUM
	       || regno + j == HARD_FRAME_POINTER_REGNUM)
	      && (! reload_completed || frame_pointer_needed))
	    {
	      success = 0;
	      break;
	    }

	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  add_to_hard_reg_set (reg_set, mode, regno);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;
	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
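
/* For illustration only: a hypothetical define_peephole2 (not taken from
   any real target .md file) that uses a (match_scratch ...) operand.  Such
   an operand is satisfied by the function above, which either hands back a
   hard register that is free across all of the matched insns or, if none
   is available, causes the whole match to fail.

     (define_peephole2
       [(match_scratch:SI 2 "r")
	(set (match_operand:SI 0 "memory_operand" "")
	     (match_operand:SI 1 "memory_operand" ""))]
       ""
       [(set (match_dup 2) (match_dup 1))
	(set (match_dup 0) (match_dup 2))])

   The mode, predicates and condition here are invented for the example;
   the point is that operand 2 only materializes if a free "r"-class
   register can be found.  */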

/* Forget all currently tracked instructions; remember only the current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last hold invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}

/* While scanning basic block BB, we found a match of length MATCH_LEN + 1,
   starting at INSN.  Perform the replacement, removing the old insns and
   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
   if the replacement is rejected.  */

static rtx_insn *
peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
{
  int i;
  rtx_insn *last, *before_try, *x;
  rtx eh_note, as_note;
  rtx_insn *old_insn;
  rtx_insn *new_insn;
  bool was_call = false;

  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
     match more than one insn, or to be split into more than one insn.  */
  old_insn = peep2_insn_data[peep2_current].insn;
  if (RTX_FRAME_RELATED_P (old_insn))
    {
      bool any_note = false;
      rtx note;

      if (match_len != 0)
	return NULL;

      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
	 may be in the stream for the purpose of register allocation.  */
      if (active_insn_p (attempt))
	new_insn = attempt;
      else
	new_insn = next_active_insn (attempt);
      if (next_active_insn (new_insn))
	return NULL;

      /* We have a 1-1 replacement.  Copy over any frame-related info.  */
      RTX_FRAME_RELATED_P (new_insn) = 1;

      /* Allow the backend to fill in a note during the split.  */
      for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
	switch (REG_NOTE_KIND (note))
	  {
	  case REG_FRAME_RELATED_EXPR:
	  case REG_CFA_DEF_CFA:
	  case REG_CFA_ADJUST_CFA:
	  case REG_CFA_OFFSET:
	  case REG_CFA_REGISTER:
	  case REG_CFA_EXPRESSION:
	  case REG_CFA_RESTORE:
	  case REG_CFA_SET_VDRAP:
	    any_note = true;
	    break;
	  default:
	    break;
	  }

      /* If the backend didn't supply a note, copy one over.  */
      if (!any_note)
        for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_FRAME_RELATED_EXPR:
	    case REG_CFA_DEF_CFA:
	    case REG_CFA_ADJUST_CFA:
	    case REG_CFA_OFFSET:
	    case REG_CFA_REGISTER:
	    case REG_CFA_EXPRESSION:
	    case REG_CFA_RESTORE:
	    case REG_CFA_SET_VDRAP:
	      add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	      any_note = true;
	      break;
	    default:
	      break;
	    }

      /* If there still isn't a note, make sure the unwind info sees the
	 same expression as before the split.  */
      if (!any_note)
	{
	  rtx old_set, new_set;

	  /* The old insn had better have been simple, or annotated.  */
	  old_set = single_set (old_insn);
	  gcc_assert (old_set != NULL);

	  new_set = single_set (new_insn);
	  if (!new_set || !rtx_equal_p (new_set, old_set))
	    add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
	}

      /* Copy prologue/epilogue status.  This is required in order to keep
	 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
      maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
     cfg-related call notes.  */
  for (i = 0; i <= match_len; ++i)
    {
      int j;
      rtx note;

      j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;
      if (!CALL_P (old_insn))
	continue;
      was_call = true;

      new_insn = attempt;
      while (new_insn != NULL_RTX)
	{
	  if (CALL_P (new_insn))
	    break;
	  new_insn = NEXT_INSN (new_insn);
	}

      gcc_assert (new_insn != NULL_RTX);

      CALL_INSN_FUNCTION_USAGE (new_insn)
	= CALL_INSN_FUNCTION_USAGE (old_insn);
      SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);

      for (note = REG_NOTES (old_insn);
	   note;
	   note = XEXP (note, 1))
	switch (REG_NOTE_KIND (note))
	  {
	  case REG_NORETURN:
	  case REG_SETJMP:
	  case REG_TM:
	  case REG_CALL_NOCF_CHECK:
	    add_reg_note (new_insn, REG_NOTE_KIND (note),
			  XEXP (note, 0));
	    break;
	  default:
	    /* Discard all other reg notes.  */
	    break;
	  }

      /* Croak if there is another call in the sequence.  */
      while (++i <= match_len)
	{
	  j = peep2_buf_position (peep2_current + i);
	  old_insn = peep2_insn_data[j].insn;
	  gcc_assert (!CALL_P (old_insn));
	}
      break;
    }

  /* If we matched any instruction that had a REG_ARGS_SIZE, then
     move those notes over to the new sequence.  */
  as_note = NULL;
  for (i = match_len; i >= 0; --i)
    {
      int j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;

      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
      if (as_note)
	break;
    }

  i = peep2_buf_position (peep2_current + match_len);
  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);

  /* Replace the old sequence with the new.  */
  rtx_insn *peepinsn = peep2_insn_data[i].insn;
  last = emit_insn_after_setloc (attempt,
				 peep2_insn_data[i].insn,
				 INSN_LOCATION (peepinsn));
  if (JUMP_P (peepinsn) && JUMP_P (last))
    CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
  before_try = PREV_INSN (insn);
  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

  /* Re-insert the EH_REGION notes.  */
  if (eh_note || (was_call && nonlocal_goto_handler_labels))
    {
      edge eh_edge;
      edge_iterator ei;

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
	  break;

      if (eh_note)
	copy_reg_eh_region_note_backward (eh_note, last, before_try);

      if (eh_edge)
	for (x = last; x != before_try; x = PREV_INSN (x))
	  if (x != BB_END (bb)
	      && (can_throw_internal (x)
		  || can_nonlocal_goto (x)))
	    {
	      edge nfte, nehe;
	      int flags;

	      nfte = split_block (bb, x);
	      flags = (eh_edge->flags
		       & (EDGE_EH | EDGE_ABNORMAL));
	      if (CALL_P (x))
		flags |= EDGE_ABNORMAL_CALL;
	      nehe = make_edge (nfte->src, eh_edge->dest,
				flags);

	      nehe->probability = eh_edge->probability;
	      nfte->probability = nehe->probability.invert ();

	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
	      bb = nfte->src;
	      eh_edge = nehe;
	    }

      /* The replacement may have turned a possibly trapping insn
	 into a non-trapping one.  Zap any dummy outgoing edges.  */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes.  */
  if (as_note)
    fixup_args_size_notes (before_try, last, get_args_size (as_note));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done.  */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
	peep2_do_rebuild_jump_labels = true;
	break;
      }

  return last;
}
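
/* A worked illustration of the RTX_FRAME_RELATED_P restriction above
   (hypothetical RTL, not from any target): if the matched insn is a
   frame-related stack adjustment such as

     (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -8)))

   then a replacement sequence containing two active insns, e.g.

     (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -4)))
     (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -4)))

   is rejected: only a 1-1 replacement lets the unwind information be
   copied over safely, so peep2_attempt returns NULL for it.  */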

/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN + 1 is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
		   rtx_insn *prev)
{
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx_insn *x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  x = last;
  do
    {
      if (INSN_P (x))
	{
	  df_insn_rescan (x);
	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
	    {
	      peep2_current_count++;
	      if (--i < 0)
		i = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[i].insn = x;
	      df_simulate_one_insn_backwards (bb, x, &live);
	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
	    }
	}
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}

/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */

static bool
peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
{
  int pos;

  /* Once we have filled the maximum number of insns the buffer can hold,
     allow the caller to match the insns against peepholes.  We wait until
     the buffer is full in case the target has similar peepholes of different
     length; we always want to match the longest if possible.  */
  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
    return false;

  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
     any other pattern, lest it change the semantics of the frame info.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* Let the buffer drain first.  */
      if (peep2_current_count > 0)
	return false;
      /* Now the insn will be the only thing in the buffer.  */
    }

  pos = peep2_buf_position (peep2_current + peep2_current_count);
  peep2_insn_data[pos].insn = insn;
  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
  peep2_current_count++;

  df_simulate_one_insn_forwards (bb, insn, live);
  return true;
}
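
/* A worked example of the ring-buffer bookkeeping used above and in
   peep2_attempt, assuming MAX_INSNS_PER_PEEP2 is 25 (so the buffer has
   MAX_INSNS_PER_PEEP2 + 1 == 26 slots): if peep2_current is 24 and
   peep2_current_count is 3, the occupied slots are 24, 25 and 0, and the
   next insn accepted above goes into slot peep2_buf_position (24 + 3),
   i.e. slot 1.  */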

/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx_insn *insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
	{
	  rtx_insn *attempt, *head;
	  int match_len;

	  if (!past_end && !NONDEBUG_INSN_P (insn))
	    {
	    next_insn:
	      insn = NEXT_INSN (insn);
	      if (insn == NEXT_INSN (BB_END (bb)))
		past_end = true;
	      continue;
	    }
	  if (!past_end && peep2_fill_buffer (bb, insn, live))
	    goto next_insn;

	  /* If we did not fill an empty buffer, it signals the end of the
	     block.  */
	  if (peep2_current_count == 0)
	    break;

	  /* The buffer filled to the current maximum, so try to match.  */

	  pos = peep2_buf_position (peep2_current + peep2_current_count);
	  peep2_insn_data[pos].insn = PEEP2_EOB;
	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);

	  /* Match the peephole.  */
	  head = peep2_insn_data[peep2_current].insn;
	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
	  if (attempt != NULL)
	    {
	      rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
	      if (last)
		{
		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
		  continue;
		}
	    }

	  /* No match: advance the buffer by one insn.  */
	  peep2_current = peep2_buf_position (peep2_current + 1);
	  peep2_current_count--;
	}
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
  if (peep2_do_cleanup_cfg)
    cleanup_cfg (CLEANUP_CFG_CHANGED);
}

/* Common predicates for use with define_bypass.  */

/* Helper function for store_data_bypass_p, handle just a single SET
   IN_SET.  */

static bool
store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
{
  if (!MEM_P (SET_DEST (in_set)))
    return false;

  rtx out_set = single_set (out_insn);
  if (out_set)
    return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));

  rtx out_pat = PATTERN (out_insn);
  if (GET_CODE (out_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (out_pat, 0); i++)
    {
      rtx out_exp = XVECEXP (out_pat, 0, i);

      if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
	continue;

      gcc_assert (GET_CODE (out_exp) == SET);

      if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
	return false;
    }

  return true;
}

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx in_set = single_set (in_insn);
  if (in_set)
    return store_data_bypass_p_1 (out_insn, in_set);

  rtx in_pat = PATTERN (in_insn);
  if (GET_CODE (in_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (in_pat, 0); i++)
    {
      rtx in_exp = XVECEXP (in_pat, 0, i);

      if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
	continue;

      gcc_assert (GET_CODE (in_exp) == SET);

      if (!store_data_bypass_p_1 (out_insn, in_exp))
	return false;
    }

  return true;
}
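
/* A hypothetical example of how this predicate is typically used as the
   guard of a define_bypass in a target's .md file (the reservation names
   and latency are invented):

     (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   i.e. the shorter latency of 1 cycle applies only when the result of the
   ALU insn feeds the data being stored, not the address computation of
   the store.  */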

/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a
   single set or a PARALLEL containing several sets; IN_INSN should be a
   single_set for truth, but for convenience of insn categorization may be
   any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
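
/* A hypothetical define_bypass using this predicate as its guard, in the
   same spirit as the sketch after store_data_bypass_p (reservation names
   and latency invented):

     (define_bypass 1 "my_alu" "my_cmove" "if_test_bypass_p")

   i.e. the shorter latency applies only when the ALU result is consumed by
   the IF_THEN_ELSE condition of the conditional move (or conditional
   jump), not by the values selected in its arms.  */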

static unsigned int
rest_of_handle_peephole2 (void)
{
  if (HAVE_peephole2)
    peephole2_optimize ();

  return 0;
}

namespace {

const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_peephole2 ();
    }

}; // class pass_peephole2

} // anon namespace

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}

namespace {

const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  PROP_rtl_split_insns, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_all_insns

} // anon namespace

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}

namespace {

const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* If optimizing, then go ahead and split insns now.  */
      if (optimize > 0)
	return true;

#ifdef STACK_REGS
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}

namespace {

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_regstack

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

} // anon namespace

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

namespace {

const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
#ifdef INSN_SCHEDULING
      return optimize > 0 && flag_schedule_insns_after_reload;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_split_before_sched2 ();
    }

}; // class pass_split_before_sched2

} // anon namespace

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}

namespace {

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* The placement of the splitting that we do for shorten_branches
	 depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return split_all_insns_noflow ();
    }

}; // class pass_split_for_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}

/* (Re)initialize the target information after a change in target.  */

void
recog_init ()
{
  /* The information is zero-initialized, so we don't need to do anything
     first time round.  */
  if (!this_target_recog->x_initialized)
    {
      this_target_recog->x_initialized = true;
      return;
    }
  memset (this_target_recog->x_bool_attr_masks, 0,
	  sizeof (this_target_recog->x_bool_attr_masks));
  for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
    if (this_target_recog->x_op_alt[i])
      {
	free (this_target_recog->x_op_alt[i]);
	this_target_recog->x_op_alt[i] = 0;
      }
}