/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"

static rtx break_out_memory_refs	PARAMS ((rtx));
static void emit_stack_probe		PARAMS ((rtx));


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */
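/* For example, on a target where QImode is 8 bits wide,

       trunc_int_for_mode (0x1ff, QImode)  ==>  -1
       trunc_int_for_mode (0x07f, QImode)  ==>  127

   since the value is first masked to the width of MODE and then
   sign-extended from the new most significant bit.  */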

HOST_WIDE_INT
trunc_int_for_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  if (! SCALAR_INT_MODE_P (mode))
    abort ();

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */
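/* For example, with REG standing for some register operand,

       plus_constant (GEN_INT (4), 3)
	 ==> (const_int 7)
       plus_constant (gen_rtx_PLUS (Pmode, REG, GEN_INT (4)), 3)
	 ==> (plus REG (const_int 7))

   Constant terms are folded where possible; otherwise the result is a
   new PLUS wrapped around X.  */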

rtx
plus_constant_wide (x, c)
     rtx x;
     HOST_WIDE_INT c;
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
	unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
	unsigned HOST_WIDE_INT l2 = c;
	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
	unsigned HOST_WIDE_INT lv;
	HOST_WIDE_INT hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for constant term in the sum and combine
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Restart or use a recursive call in case the remaining operand is
	 something that we handle specially, such as a SYMBOL_REF.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  c += INTVAL (XEXP (x, 1));

	  if (GET_MODE (x) != VOIDmode)
	    c = trunc_int_for_mode (c, GET_MODE (x));

	  x = XEXP (x, 0);
	  goto restart;
	}
      else if (CONSTANT_P (XEXP (x, 1)))
	{
	  x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
	  c = 0;
	}
      else if (find_constant_term_loc (&y))
	{
	  /* We need to be careful since X may be shared and we can't
	     modify it in place.  */
	  rtx copy = copy_rtx (x);
	  rtx *const_loc = find_constant_term_loc (&copy);

	  *const_loc = plus_constant (*const_loc, c);
	  x = copy;
	  c = 0;
	}
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */
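/* For example, with *CONSTPTR initially const0_rtx and REG some
   register operand,

       eliminate_constant_term ((plus REG (const_int 8)), constptr)

   returns REG and leaves (const_int 8) in *CONSTPTR.  */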

rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	continue;
      if (GET_CODE (insn) == CODE_LABEL
	  || GET_CODE (insn) == BARRIER)
	return 0;
      if (GET_CODE (insn) == INSN
	  || GET_CODE (insn) == JUMP_INSN
	  || GET_CODE (insn) == CALL_INSN)
	{
	  if (reg_set_p (reg, insn))
	    return 0;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return insn;
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      if (any_uncondjump_p (insn))
		next = JUMP_LABEL (insn);
	      else
		return 0;
	    }
	  if (GET_CODE (insn) == CALL_INSN
	      && REGNO (reg) < FIRST_PSEUDO_REGISTER
	      && call_used_regs[REGNO (reg)])
	    return 0;
	}
      else
	abort ();
    }
  return 0;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (exp)
     tree exp;
{
  tree size = (*lang_hooks.expr_size) (exp);

  if (TREE_CODE (size) != INTEGER_CST
      && contains_placeholder_p (size))
    size = build (WITH_RECORD_EXPR, sizetype, size, exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (exp)
     tree exp;
{
  tree t = (*lang_hooks.expr_size) (exp);

  if (t == 0
      || TREE_CODE (t) != INTEGER_CST
      || TREE_OVERFLOW (t)
      || TREE_INT_CST_HIGH (t) != 0
      /* If the result would appear negative, it's too big to represent.  */
      || (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0)
    return -1;

  return TREE_INT_CST_LOW (t);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (x)
     rtx x;
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}

#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */
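/* For example, a copy of a SYMBOL_REF is simply given the new mode,
   while for something like (plus REG (const_int 4)) the conversion is
   pushed down onto REG, provided converting the constant would leave
   it unchanged (or the address is being narrowed).  */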

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;
  enum rtx_code code;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it.  We can always safely permute them if we are
	 making the address narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
}
#endif

/* Given a memory address or facsimile X, construct a new address,
   currently equivalent, that is stable: future stores won't change it.

   X must be composed of constants, register and memory references
   combined with addition, subtraction and multiplication:
   in other words, just what you can get from expand_expr if sum_ok is 1.

   Works by making copies of all regs and memory locations used
   by X and combining them the same way X does.
   You could also stabilize the reference to this address
   by copying the address to a register with copy_to_reg;
   but then you wouldn't get indexed addressing in the reference.  */

rtx
copy_all_regs (x)
     rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
	  )
	x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = copy_all_regs (XEXP (x, 0));
      rtx op1 = copy_all_regs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}

/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */
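/* For example, if MODE's addressing forms do not include the sum of
   two registers, a sum such as (plus REG1 REG2) is computed into a
   fresh pseudo register, and that register is used as the address.  */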

rtx
memory_address (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx oldx = x;

  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses thru registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
      && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	goto win2;

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
	  /* Don't copy an addr via a reg if it is one of our stack slots.  */
	  && ! (GET_CODE (x) == PLUS
		&& (XEXP (x, 0) == virtual_stack_vars_rtx
		    || XEXP (x, 0) == virtual_incoming_args_rtx)))
	{
	  if (general_operand (x, Pmode))
	    x = force_reg (Pmode, x);
	  else
	    x = force_operand (x, NULL_RTX);
	}
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
{
  if (GET_CODE (ref) != MEM)
    return ref;
  if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
      && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* Given REF, either a MEM or a REG, and T, either the type of REF or
   the expression corresponding to REF, set RTX_UNCHANGING_P if
   appropriate.  */

void
maybe_set_unchanging (ref, t)
     rtx ref;
     tree t;
{
  /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
     initialization is only executed once, or whose initializer always
     has the same value.  Currently we simplify this to PARM_DECLs in the
     first case, and decls with TREE_CONSTANT initializers in the second.  */
  if ((TREE_READONLY (t) && DECL_P (t)
       && (TREE_CODE (t) == PARM_DECL
	   || (DECL_INITIAL (t) && TREE_CONSTANT (DECL_INITIAL (t)))))
      || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
    RTX_UNCHANGING_P (ref) = 1;
}

/* Return a modified copy of X with its memory address copied
   into a temporary register to protect it from side effects.
   If X is not a MEM, it is returned unchanged (and not copied).
   Perhaps even if it is a MEM, if there is no need to change it.  */

rtx
stabilize (x)
     rtx x;
{

  if (GET_CODE (x) != MEM
      || ! rtx_unstable_p (XEXP (x, 0)))
    return x;

  return
    replace_equiv_address (x, force_reg (Pmode, copy_all_regs (XEXP (x, 0))));
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (x)
     rtx x;
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
    abort ();
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */
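/* For example, force_reg (SImode, GEN_INT (42)) emits
   (set (reg:SI N) (const_int 42)) for a fresh pseudo N and returns the
   new register.  If the constant had to be loaded in some other form,
   a REG_EQUAL note recording the original value is attached so that
   optimizers can still see it.  */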

rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx temp, insn, set;

  if (GET_CODE (x) == REG)
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (GET_CODE (temp) == REG)
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (x)
     rtx x;
{
  rtx temp;

  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));
  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (x, target, mode)
     rtx x, target;
     enum machine_mode mode;
{
  rtx temp;

  if (target && GET_CODE (target) == REG)
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is nonzero if this call is promoting args for a call.  */
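/* For example, on a target whose PROMOTE_MODE widens integer types
   narrower than a word, a `short' (HImode) value would be promoted to
   word_mode here, with *PUNSIGNEDP saying how the extension is to be
   done.  */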

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */
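/* For example, adjust_stack (GEN_INT (8)) releases 8 bytes of stack;
   on a machine where the stack grows downward, this is done by adding
   8 to the stack pointer.  */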

void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable-sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       add_optab,
#else
		       sub_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable-sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       sub_optab,
#else
		       add_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */
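/* For example, with a preferred stack boundary of 64 bits (an ALIGN of
   8 bytes), a constant SIZE of 20 is rounded up to 24.  */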

rtx
round_push (size)
     rtx size;
{
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  if (align == 1)
    return size;
  if (GET_CODE (size) == CONST_INT)
    {
      int new = (INTVAL (size) + align - 1) / align * align;
      if (INTVAL (size) != new)
	size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So we do the addition ourselves and then
	 use TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
			    NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
  return size;
}

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */
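/* For example, a caller typically brackets code that moves the stack
   pointer with

       rtx sa = NULL_RTX;
       emit_stack_save (SAVE_BLOCK, &sa, NULL_RTX);
       ... emit code that allocates dynamic stack space ...
       emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);

   where the save area SA is allocated on first use.  */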

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
	abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at the
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx after;
     rtx sa;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode, 
			gen_rtx_SCRATCH (VOIDmode))));
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

#ifdef SETJMP_VIA_SAVE_AREA
/* Optimize RTL generated by allocate_dynamic_stack_space for targets
   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
   platforms, the dynamic stack space used can corrupt the original
   frame, thus causing a crash if a longjmp unwinds to it.  */

void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN(insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
	continue;

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
	    continue;

	  if (!current_function_calls_setjmp)
	    {
	      rtx pat = PATTERN (insn);

	      /* If we do not see the note in a pattern matching
		 these precise characteristics, we did something
		 entirely wrong in allocate_dynamic_stack_space.

		 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
		 was defined on a machine where stacks grow towards higher
		 addresses.

		 Right now the only supported port whose stack grows upward
		 is the HPPA, and it does not define SETJMP_VIA_SAVE_AREA.  */
	      if (GET_CODE (pat) != SET
		  || SET_DEST (pat) != stack_pointer_rtx
		  || GET_CODE (SET_SRC (pat)) != MINUS
		  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
		abort ();

	      /* This will now be transformed into a (set REG REG)
		 so we can just blow away all the other notes.  */
	      XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
	      REG_NOTES (insn) = NULL_RTX;
	    }
	  else
	    {
	      /* setjmp was called, we must remove the REG_SAVE_AREA
		 note so that later passes do not get confused by its
		 presence.  */
	      if (note == REG_NOTES (insn))
		{
		  REG_NOTES (insn) = XEXP (note, 1);
		}
	      else
		{
		  rtx srch;

		  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
		    if (XEXP (srch, 1) == note)
		      break;

		  if (srch == NULL_RTX)
		    abort ();

		  XEXP (srch, 1) = XEXP (note, 1);
		}
	    }
	  /* Once we've seen the note of interest, we need not look at
	     the rest of them.  */
	  break;
	}
    }
}
#endif /* SETJMP_VIA_SAVE_AREA */

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */
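/* For example, this is the routine used to expand alloca and
   variable-sized local arrays: the expanded byte count is passed as
   SIZE and the returned rtx is the address of the new space, aligned
   to BIGGEST_ALIGNMENT.  */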

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict that BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
				      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
		       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
		      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
	int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

	/* See optimize_save_area_alloca to understand what is being
	   set up here.  */

	/* ??? Code below assumes that the save area needs maximal
	   alignment.  This constraint may be too strong.  */
	if (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
	  abort ();

	if (GET_CODE (size) == CONST_INT)
	  {
	    HOST_WIDE_INT new = INTVAL (size) / align * align;

	    if (INTVAL (size) != new)
	      setjmpless_size = GEN_INT (new);
	    else
	      setjmpless_size = size;
	  }
	else
	  {
	    /* Since we know overflow is not possible, we avoid using
	       CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	    setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
					     GEN_INT (align), NULL_RTX, 1);
	    setjmpless_size = expand_mult (Pmode, setjmpless_size,
					   GEN_INT (align), NULL_RTX, 1);
	  }
	/* Our optimization works based upon being able to perform a simple
	   transformation of this RTL into a (set REG REG) so make sure things
	   did in fact end up in a REG.  */
	if (!register_operand (setjmpless_size, Pmode))
	  setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);

  do_pending_stack_adjust ();

  /* We ought always to be called at the top level, and the stack ought
     to be aligned properly.  */
  if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
    abort ();

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  For operand 1, convert to the
	 proper mode and validate.  */
      if (mode == VOIDmode)
	mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
	size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
	{
	  rtx available;
	  rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
	  available = expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx, stack_limit_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#else
	  available = expand_binop (Pmode, sub_optab,
				    stack_limit_rtx, stack_pointer_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#endif
	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
	{
	  rtx note_target = get_last_insn ();

	  REG_NOTES (note_target)
	    = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
				 REG_NOTES (note_target));
	}
#endif /* SETJMP_VIA_SAVE_AREA */

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So we do the addition ourselves and then
	 use TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (libfunc)
     rtx libfunc;
{
  stack_check_libfunc = libfunc;
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (address)
     rtx address;
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */
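/* For example, with a STACK_CHECK_PROBE_INTERVAL of 4096, FIRST == 0
   and a constant SIZE of 12288, probes are emitted at offsets 4096,
   8192 and finally 12288 from the current stack pointer.  */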

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (first, size)
     HOST_WIDE_INT first;
     rtx size;
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      rtx addr = memory_address (QImode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					         stack_pointer_rtx,
					         plus_constant (size, first)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (addr) != ptr_mode)
	addr = convert_memory_address (ptr_mode, addr);
#endif

      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
			 ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
	last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
	   && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
	 for values of N from 1 until it exceeds LAST.  If only one
	 probe is needed, this will not generate any code.  Then probe
	 at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
	   offset < INTVAL (size);
	   offset = offset + STACK_CHECK_PROBE_INTERVAL)
	emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					  stack_pointer_rtx,
					  GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					stack_pointer_rtx,
					plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
			 NULL_RTX);
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (GET_CODE (test_addr) != REG
	  || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
	test_addr = force_reg (Pmode, test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_BEG);
      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_CONT);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#endif

      if (temp != test_addr)
	abort ();

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
			       NULL_RTX, Pmode, 1, loop_lab);
      emit_jump (end_lab);
      emit_note (NULL, NOTE_INSN_LOOP_END);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (valtype, func, outgoing)
     tree valtype;
     tree func ATTRIBUTE_UNUSED;
     int outgoing ATTRIBUTE_UNUSED;
{
  rtx val;

#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    val = FUNCTION_OUTGOING_VALUE (valtype, func);
  else
#endif
    val = FUNCTION_VALUE (valtype, func);

  if (GET_CODE (val) == REG
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will be large enough that no mode
	 will match and we will abort later in this function.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      if (tmpmode == VOIDmode)
	abort ();

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  return LIBCALL_VALUE (mode);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (code)
     enum rtx_code code;
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"