/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);
static void emit_stack_probe (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */
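  /* The mask/xor/subtract sequence below is a branch-free sign extension:
     masking keeps the low WIDTH bits, and XORing with the sign bit and
     then subtracting it replicates that bit through the upper bits.
     For example, with WIDTH == 8, c == 0xff becomes (HOST_WIDE_INT) -1.  */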

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

/* Return an rtx for the sum of X and the integer C.  */
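/* For example, plus_constant (GEN_INT (3), 4) folds to (const_int 7),
   while applying it to a REG yields (plus reg (const_int 4)); a sum that
   already contains a constant term has that term combined with C.  */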

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
	unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
	unsigned HOST_WIDE_INT l2 = c;
	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
	unsigned HOST_WIDE_INT lv;
	HOST_WIDE_INT hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for constant term in the sum and combine
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Restart or use a recursive call in case the remaining operand is
	 something that we handle specially, such as a SYMBOL_REF.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  c += INTVAL (XEXP (x, 1));

	  if (GET_MODE (x) != VOIDmode)
	    c = trunc_int_for_mode (c, GET_MODE (x));

	  x = XEXP (x, 0);
	  goto restart;
	}
      else if (CONSTANT_P (XEXP (x, 1)))
	{
	  x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
	  c = 0;
	}
      else if (find_constant_term_loc (&y))
	{
	  /* We need to be careful since X may be shared and we can't
	     modify it in place.  */
	  rtx copy = copy_rtx (x);
	  rtx *const_loc = find_constant_term_loc (&copy);

	  *const_loc = plus_constant (*const_loc, c);
	  x = copy;
	  c = 0;
	}
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */
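/* For example, given X == (plus (plus reg (const_int 4)) (const_int 8))
   and *CONSTPTR == const0_rtx, the result is just REG and *CONSTPTR is
   left pointing to (const_int 12).  */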

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = lang_hooks.expr_size (exp);
      gcc_assert (size);
      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp);
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = lang_hooks.expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = simplify_gen_binary (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */
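/* For example, on a target where Pmode is DImode and ptr_mode is SImode,
   converting (symbol_ref:SI foo) to Pmode yields (symbol_ref:DI foo),
   while a CONST_INT is zero- or sign-extended as POINTERS_EXTEND_UNSIGNED
   directs.  */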

rtx
convert_memory_address (enum machine_mode to_mode ATTRIBUTE_UNUSED, 
			rtx x)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it or if one operand is a constant and we are
	 using a ptr_extend instruction  (POINTERS_EXTEND_UNSIGNED < 0).
	 We can always safely permute them if we are making the address
	 narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && (XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))
		  || POINTERS_EXTEND_UNSIGNED < 0)))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */
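/* For example, if X is (plus (mem ADDR) (const_int 4)) and that form is
   not a valid address for MODE, the inner MEM is loaded into a pseudo
   first and the resulting (plus reg (const_int 4)) is used instead,
   provided the target accepts it.  */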

rtx
memory_address (enum machine_mode mode, rtx x)
{
  rtx oldx = x;

  x = convert_memory_address (Pmode, x);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_p (mode, x))
	goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	{
	  x = oldx;
	  goto done;
	}

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, done);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);
    }

 done:

  gcc_assert (memory_address_p (mode, x));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */
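/* For example, a reference to (symbol_ref VAR), where VAR lives in an
   object block, may be rewritten as (plus ANCHOR (const_int DELTA)),
   with ANCHOR the section anchor selected for the block and DELTA the
   offset of VAR from that anchor.  */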

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (base, 0), 1)) == CONST_INT)
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */
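/* For example, force_reg (SImode, GEN_INT (42)) emits a move of 42 into
   a fresh pseudo and returns that REG; if the constant had to be
   legitimized into some other form first, a REG_EQUAL note records the
   original value.  */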

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
      {
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

	ca = exact_log2 (INTVAL (c) & -INTVAL (c)) * BITS_PER_UNIT;

	align = MIN (sa, ca);
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is nonzero if this call is promoting args for a call.  */
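/* For example, on a target whose PROMOTE_MODE widens QImode and HImode
   integers to SImode, a char value is promoted to SImode and *PUNSIGNEDP
   reports whether zero- or sign-extension is to be used.  */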

#if defined(PROMOTE_MODE) && !defined(PROMOTE_FUNCTION_MODE)
#define PROMOTE_FUNCTION_MODE PROMOTE_MODE
#endif

enum machine_mode
promote_mode (const_tree type, enum machine_mode mode, int *punsignedp,
	      int for_call ATTRIBUTE_UNUSED)
{
  const enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifndef PROMOTE_MODE
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_FUNCTION_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
#ifdef PROMOTE_MODE
      if (for_call)
	{
#endif
	  PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
#ifdef PROMOTE_MODE
	}
      else
	{
	  PROMOTE_MODE (mode, unsignedp, type);
	}
#endif
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       add_optab,
#else
		       sub_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */
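/* For example, anti_adjust_stack (GEN_INT (16)) reserves 16 bytes: it
   subtracts 16 from the stack pointer on STACK_GROWS_DOWNWARD targets,
   adds 16 otherwise, and updates stack_pointer_delta to match.  */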

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       sub_optab,
#else
		       add_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */
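/* For example, with PREFERRED_STACK_BOUNDARY == 64 the alignment unit is
   8 bytes, so a 13-byte SIZE is rounded up to 16.  */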

static rtx
round_push (rtx size)
{
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (align == 1)
    return size;

  if (GET_CODE (size) == CONST_INT)
    {
      HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

      if (INTVAL (size) != new_size)
	size = GEN_INT (new_size);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So do the addition ourselves, then use
	 TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
			    NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }

  return size;
}

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      do_pending_stack_adjust ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      do_pending_stack_adjust ();
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
		   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */
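/* This is the routine used to expand alloca and variable-length arrays:
   the expanders pass the byte count as SIZE and use the address returned
   here as the base of the new object.  */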

rtx
allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
{
  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict that BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
				      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
		       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (!cfun->calls_setjmp)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

      /* ??? Code below assumes that the save area needs maximal
	 alignment.  This constraint may be too strong.  */
      gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);

      if (GET_CODE (size) == CONST_INT)
	{
	  HOST_WIDE_INT new_size = INTVAL (size) / align * align;

	  if (INTVAL (size) != new_size)
	    size = GEN_INT (new_size);
	}
      else
	{
	  /* Since we know overflow is not possible, we avoid using
	     CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
				GEN_INT (align), NULL_RTX, 1);
	  size = expand_mult (Pmode, size,
			      GEN_INT (align), NULL_RTX, 1);
	}
    }
  else
    {
      rtx dynamic_offset
	= expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
			stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);

  do_pending_stack_adjust ();

 /* We ought always to be called at the top level, and the stack ought
    to be aligned properly at this point.  */
  gcc_assert (!(stack_pointer_delta
		% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.
     Take into account what has already been checked.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
		       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || !REG_P (target)
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  For operand 1, convert to the
	 proper mode and validate.  */
      if (mode == VOIDmode)
	mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
	size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
	{
	  rtx available;
	  rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
	  available = expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx, stack_limit_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#else
	  available = expand_binop (Pmode, sub_optab,
				    stack_limit_rtx, stack_pointer_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#endif
	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      anti_adjust_stack (size);

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So do the addition ourselves, then use
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (rtx libfunc)
{
  stack_check_libfunc = libfunc;
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */
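/* For example, with STACK_CHECK_PROBE_INTERVAL == 4096, FIRST == 0 and
   SIZE == 16384, the constant case below probes at offsets 4096, 8192
   and 12288 from the stack pointer and then at the final address,
   offset 16384.  */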

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      rtx addr = memory_address (QImode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					         stack_pointer_rtx,
					         plus_constant (size, first)));

      addr = convert_memory_address (ptr_mode, addr);
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
			 ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
	last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
	   && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
	 for values of N from 1 until it exceeds LAST.  If only one
	 probe is needed, this will not generate any code.  Then probe
	 at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
	   offset < INTVAL (size);
	   offset = offset + STACK_CHECK_PROBE_INTERVAL)
	emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					  stack_pointer_rtx,
					  GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					stack_pointer_rtx,
					plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
			 NULL_RTX);
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (!REG_P (test_addr)
	  || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
	test_addr = force_reg (Pmode, test_addr);

      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#endif

      gcc_assert (temp == test_addr);

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
			       NULL_RTX, Pmode, 1, loop_lab);
      emit_jump (end_lab);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
		     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will then be large enough that no
	 mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode)
{
  return LIBCALL_VALUE (mode);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"