/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   The functions whose names start with `expand_' are called by the
   expander to generate RTL instructions for various kinds of constructs.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */
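
/* For example, a switch whose cases are a dense run such as 0..7 is a
   candidate for a branch table, while something like "case 0: ...
   case 1000000: ..." is far too sparse for one and would instead be
   expanded as a (balanced) tree of compare-and-jump insns.  */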

struct case_node GTY(())
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
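
/* E.g., COST_TABLE (-1) is cost_table_[0] and COST_TABLE (127) is
   cost_table_[128], so the 129 slots cover indices -1 through 127.  */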

static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static int estimate_case_costs (case_node_ptr);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *add_case_node (struct case_node *, tree,
					tree, tree, tree);


/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  gcc_assert (function);

  if (function != current_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode,
			    ggc_strdup (TREE_STRING_POINTER (string)));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
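
/* For example, "=r" just sets *ALLOWS_REG, "=m" just sets *ALLOWS_MEM,
   "=rm" sets both, and "+r" additionally sets *IS_INOUT and is
   canonicalized in place to "=r".  */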

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning (0, "output constraint %qc for operand %d "
		 "is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned "
	       "%<+%> or %<=%>");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

/* Similar, but for input constraints.  */
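/* E.g., "m" sets *ALLOWS_MEM, "r" sets *ALLOWS_REG, and a digit such
   as "0" is resolved through CONSTRAINTS to the matching output's
   constraint when that digit is the operand's only alternative.  */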

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains %qc", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation %qc in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}

/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
   can be an asm-declared register.  Called via walk_tree.  */

static tree
decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees, void *data)
{
  tree decl = *declp;
  const HARD_REG_SET *regs = data;

  if (TREE_CODE (decl) == VAR_DECL)
    {
      if (DECL_HARD_REGISTER (decl)
	  && REG_P (DECL_RTL (decl))
	  && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
	{
	  rtx reg = DECL_RTL (decl);
	  unsigned int regno;

	  for (regno = REGNO (reg);
	       regno < (REGNO (reg)
			+ hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
	       regno++)
	    if (TEST_HARD_REG_BIT (*regs, regno))
	      return decl;
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* If there is an overlap between *REGS and DECL, return the first overlap
   found.  */
tree
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
{
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
}

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return TRUE for a
   conflict, FALSE for ok.  */
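
/* For example (hypothetical register and variable names):

     register int r0var asm ("r0");
     asm ("..." : : "r" (r0var) : "r0");

   uses R0 both for an asm-declared register variable and as a
   clobber; that is the conflict diagnosed here.  */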

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qs conflicts with asm clobber list",
	     IDENTIFIER_POINTER (DECL_NAME (overlap)));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
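
/* For example, the GNU C statement

     asm volatile ("add %1,%0" : "=r" (sum) : "r" (inc), "0" (sum));

   arrives here with one output, two inputs, and no clobbers; the
   matching constraint "0" ties the second input to output 0.  (The
   template and operand names are illustrative only.)  */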

static void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname;

      if (TREE_VALUE (tail) == error_mark_node)
	return;
      regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name %qs in %<asm%>", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
        {
	  /* Clobbering the PIC register is an error.  */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register %qs clobbered in %<asm%>", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && REG_P (DECL_RTL (val))
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	lang_hooks.mark_addressable (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	lang_hooks.mark_addressable (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;
      bool ok;

      ok = parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || REG_P (DECL_RTL (val)))
	      && ! (REG_P (DECL_RTL (val))
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (MEM_P (op))
	    op = validize_mem (op);

	  if (! allows_reg && !MEM_P (op))
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && MEM_P (op))
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = op;
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       ggc_strdup (TREE_STRING_POINTER (string)),
			       empty_string, 0, argvec, constraintvec,
			       locus);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;
      bool ok;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				   constraints, &allows_mem, &allows_reg);
      gcc_assert (ok);

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      /* EXPAND_INITIALIZER will not generate code for valid initializer
	 constants, but will still generate code for other types of operand.
	 This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
			allows_reg ? EXPAND_NORMAL
			: allows_mem ? EXPAND_MEMORY
			: EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg && TYPE_MODE (type) != BLKmode)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning (0, "asm operand %d probably doesn%'t match constraints",
		     i + noutputs);
	  else if (MEM_P (op))
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as a valid memory_operand at
		 this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    {
	      warning (0, "use of memory input without lvalue in "
		       "asm operand %d is deprecated", i + noutputs);

	      if (CONSTANT_P (op))
		{
		  rtx mem = force_const_mem (TYPE_MODE (type), op);
		  if (mem)
		    op = validize_mem (mem);
		  else
		    op = force_reg (TYPE_MODE (type), op);
		}
	      if (REG_P (op)
		  || GET_CODE (op) == SUBREG
		  || GET_CODE (op) == CONCAT)
		{
		  tree qual_type = build_qualified_type (type,
							 (TYPE_QUALS (type)
							  | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
		  memloc = validize_mem (memloc);
		  emit_move_insn (memloc, op);
		  op = memloc;
		}
	    }
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (type),
			     ggc_strdup (constraints[i + noutputs]));

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  /* For in-out operands, copy output rtx to input rtx.  */
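  /* E.g., an output written "+r" was canonicalized to "=r" earlier;
     here output J also becomes a trailing input whose constraint is
     the matching-constraint string "J" (the output's decimal number).  */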
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }

  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
			       ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }

  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }

  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    ggc_strdup (TREE_STRING_POINTER (string)),
			    ggc_strdup (constraints[i]),
			    i, argvec, constraintvec, locus));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);
	  rtx clobbered_reg;

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  clobbered_reg = gen_rtx_REG (QImode, j);

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above.  */
	  if (!clobber_conflict_found)
	    {
	      int opno;

	      /* We test the old body (obody) contents to avoid tripping
		 over the under-construction body.  */
	      for (opno = 0; opno < noutputs; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
		  internal_error ("asm clobber conflict with output operand");

	      for (opno = 0; opno < ninputs - ninout; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg,
					     ASM_OPERANDS_INPUT (obody, opno)))
		  internal_error ("asm clobber conflict with input operand");
	    }

	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	}

      emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}

void
expand_asm_expr (tree exp)
{
  int noutputs, i;
  tree outputs, tail;
  tree *o;

  if (ASM_INPUT_P (exp))
    {
      expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
      return;
    }

  outputs = ASM_OUTPUTS (exp);
  noutputs = list_length (outputs);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
		       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
		       input_location);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
	{
	  expand_assignment (o[i], TREE_VALUE (tail));
	  free_temp_slots ();

	  /* Restore the original value so that it's correct the next
	     time we expand this function.  */
	  TREE_VALUE (tail) = o[i];
	}
    }
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for %<asm%> differ "
		     "in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}

/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.  */

static bool
check_unique_operand_names (tree outputs, tree inputs)
{
  tree i, j;

  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name %qs",
	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}

/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in OUTPUTS and INPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  */
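
/* For example, "mov %[src], %[dst]" with an output named [dst] and an
   input named [src] becomes "mov %1, %0" (dst being operand 0), and
   any "[dst]" or "[src]" inside the constraint strings is rewritten
   to the operand number in the same way.  */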

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}

/* A subroutine of resolve_asm_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  len = q - p - 1;

  /* Resolve the name to a number.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  *q = '\0';
  error ("undefined named operand %qs", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}

/* Generate RTL to evaluate the expression EXP.  */

void
expand_expr_stmt (tree exp)
{
  rtx value;
  tree type;

  value = expand_expr (exp, const0_rtx, VOIDmode, 0);
  if (GIMPLE_TUPLE_P (exp))
    type = void_type_node;
  else
    type = TREE_TYPE (exp);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (type) == VOIDmode)
	;
      else if (TYPE_MODE (type) != BLKmode)
	value = copy_to_reg (value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (value, value, EQ,
				   expand_normal (TYPE_SIZE (type)),
				   BLKmode, 0, lab);
	  emit_label (lab);
	}
    }

  /* Free any temporaries used to evaluate this expression.  */
  free_temp_slots ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression.  */

int
warn_if_unused_value (tree exp, location_t locus)
{
 restart:
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
	 case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
	return 0;

    warn:
      warning (0, "%Hvalue computed is not used", &locus);
      return 1;
    }
}


/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we are not
     returning one here, clobber the return registers so that they are
     not propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.)  */

void
expand_naked_return (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  end_label = naked_return_label;
  if (end_label == 0)
    end_label = naked_return_label = gen_label_rtx ();

  emit_jump (end_label);
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location
     unless it's already there.  */

  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
      {
	int unsignedp = TYPE_UNSIGNED (type);
	enum machine_mode old_mode
	  = DECL_MODE (DECL_RESULT (current_function_decl));
	enum machine_mode mode
	  = promote_mode (type, old_mode, &unsignedp, 1);

	if (mode != old_mode)
	  val = convert_modes (mode, old_mode, val, unsignedp);
      }
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == GIMPLE_MODIFY_STMT
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (GENERIC_TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = GENERIC_TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (GENERIC_TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_normal (retval_rhs);
      enum machine_mode tmpmode, result_reg_mode;

      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least
	 significant end of the register, which translates to right
	 padding on little-endian targets and left padding on big-endian
	 targets.  The opposite holds if the structure is returned at
	 the most significant end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));
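
      /* Editorial worked example, not from the original sources: with
	 UNITS_PER_WORD == 4 and BITS_PER_WORD == 32, a 6-byte struct
	 leaves 2 bytes in its last word.  Returned at the least
	 significant end of a big-endian register, that partial word
	 needs (32 - 2 * 8) == 16 bits of left padding, so
	 PADDING_CORRECTION is 16.  */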

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
         reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      expand_value_return (result_rtl);
    }
}

/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
   in question represents the outermost pair of curly braces (i.e. the "body
   block") of a function or method.

   For any BLOCK node representing a "body block" of a function or method, the
   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
   represents the outermost (function) scope for the function or method (i.e.
   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
   *that* node in turn will point to the relevant FUNCTION_DECL node.  */

int
is_body_block (tree stmt)
{
  if (lang_hooks.no_body_blocks)
    return 0;

  if (TREE_CODE (stmt) == BLOCK)
    {
      tree parent = BLOCK_SUPERCONTEXT (stmt);

      if (parent && TREE_CODE (parent) == BLOCK)
	{
	  tree grandparent = BLOCK_SUPERCONTEXT (parent);

	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
	    return 1;
	}
    }

  return 0;
}
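
/* Editorial illustration, not from the original sources: given

       int f (int x)	(FUNCTION_DECL)
	 scope BLOCK, including the PARM_DECL for x
	   body BLOCK for the outermost braces

   is_body_block returns nonzero only for the innermost BLOCK; its
   BLOCK_SUPERCONTEXT is the scope BLOCK, whose BLOCK_SUPERCONTEXT in
   turn is the FUNCTION_DECL, as the comment above describes.  */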

/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  */
static void
expand_nl_goto_receiver (void)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}

/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	{
	  mark_user_reg (DECL_RTL (decl));

	  /* Trust user variables which have a pointer type to really
	     be pointers.  Do not trust compiler generated temporaries
	     as our type system is totally busted as it relates to
	     pointer arithmetic which translates into lots of compiler
	     generated objects with pointer types, but which are not really
	     pointers.  */
	  if (POINTER_TYPE_P (type))
	    mark_reg_pointer (DECL_RTL (decl),
			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
	}
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
					  STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  gcc_assert (MEM_P (DECL_RTL (decl)));
	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if we have
	 not already done so.  */
      do_pending_stack_adjust ();

      /* Compute the variable's size, in bytes.  This will expand any
	 needed SAVE_EXPRs for the first time.  */
      size = expand_normal (DECL_SIZE_UNIT (decl));
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirectly through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);


      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}

/* Emit code to save the current value of stack.  */
rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
  return ret;
}

/* Emit code to restore the current value of stack.  */
void
expand_stack_restore (tree var)
{
  rtx sa = DECL_RTL (var);

  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
}
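
/* Editorial note, not from the original sources: these two helpers
   are expected to be emitted in matched pairs, e.g. around a scope
   containing a variable-length array, so that stack space pushed by
   allocate_dynamic_stack_space inside the scope is released on exit.
   The SAVE_BLOCK level selects a block-scope save area in
   emit_stack_save/emit_stack_restore; the rtx returned by
   expand_stack_save is expected to end up as the DECL_RTL of the VAR
   later handed to expand_stack_restore.  */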

/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
			tree decl_elts)
{
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  expand_decl (decl);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
      rtx decl_rtl;

      /* If any of the elements are used, so is the entire
	 union.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      if (mode == GET_MODE (x))
	decl_rtl = x;
      else if (MEM_P (x))
        /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
           instead create a new MEM rtx with the proper mode.  */
	decl_rtl = adjust_address_nv (x, mode, 0);
      else
	{
	  gcc_assert (REG_P (x));
	  decl_rtl = gen_lowpart_SUBREG (mode, x);
	}
      SET_DECL_RTL (decl_elt, decl_rtl);
    }
}

/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
   are converted to the case's index type TYPE.  */
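
/* Editorial illustration, not from the original sources: if the
   sorted vector holds cases 1, 3 and 5..7, the caller hands them to
   add_case_node as 5..7, then 3, then 1; each call pushes onto the
   head of the list, so the final list reads 1 -> 3 -> 5..7 through
   the RIGHT links, in ascending order as stated above.  */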

static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
	       tree label)
{
  tree min_value, max_value;
  struct case_node *r;

  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (high, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;

      /* If the lower bound is less than the index type's minimum
	 value, truncate the range bounds.  */
      if (TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	low = min_value;
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
	 value, truncate the range bounds.  */
      if (TREE_CODE (max_value) == INTEGER_CST
	  && tree_int_cst_compare (high, max_value) > 0)
	high = max_value;
      high = fold_convert (type, high);
    }

  /* Add this label to the chain.  Make sure to drop overflow flags.  */
  r = ggc_alloc (sizeof (struct case_node));
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
			       TREE_INT_CST_HIGH (low));
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
				TREE_INT_CST_HIGH (high));
  r->code_label = label;
  r->parent = r->left = NULL;
  r->right = head;
  return r;
}

/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
			     != CODE_FOR_nothing)
#endif


/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;
  HOST_WIDE_INT lo;
  rtx label;
  int bits;
};
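
/* Editorial illustration, not from the original sources: case values
   0, 3 and 5 (relative to MINVAL) sharing one target label are
   summarized as LO == 0x29 (binary 101001), HI == 0 and BITS == 3.  */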

/* Determine whether "1 << x" is relatively cheap in word_mode.  */

static bool
lshift_cheap_p (void)
{
  static bool init = false;
  static bool cheap = true;

  if (!init)
    {
      rtx reg = gen_rtx_REG (word_mode, 10000);
      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
      cheap = cost < COSTS_N_INSNS (3);
      init = true;
    }

  return cheap;
}

/* Comparison function for qsort to order bit tests by decreasing
   number of case nodes, i.e. the node with the most cases gets
   tested first.  */

static int
case_bit_test_cmp (const void *p1, const void *p2)
{
  const struct case_bit_test *d1 = p1;
  const struct case_bit_test *d2 = p2;

  if (d2->bits != d1->bits)
    return d2->bits - d1->bits;

  /* Stabilize the sort.  */
  return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
}

/*  Expand a switch statement by a short sequence of bit-wise
    comparisons.  "switch(x)" is effectively converted into
    "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
    integer constants.

    INDEX_EXPR is the value being switched on, which is of
    type INDEX_TYPE.  MINVAL is the lowest case value in
    the case nodes, of INDEX_TYPE type, and RANGE is the highest
    value minus MINVAL, also of type INDEX_TYPE.  NODES is
    the set of case nodes, and DEFAULT_LABEL is the label to
    branch to should none of the cases match.

    There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
    node targets.  */
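
/* Editorial worked example, not from the original sources: for

       switch (x)
	 {
	 case 0: case 3: case 5: return a ();
	 default: return b ();
	 }

   MINVAL is 0 and RANGE is 5, and the emitted RTL behaves roughly
   like

       if (x > 5) goto default_label;
       if ((1 << x) & 0x29) goto label_of_a;    (0x29 is binary 101001)
       goto default_label;

   with the subtraction of MINVAL folded away because MINVAL is 0.  */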

static void
emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
		     tree range, case_node_ptr nodes, rtx default_label)
{
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
  enum machine_mode mode;
  rtx expr, index, label;
  unsigned int i,j,lo,hi;
  struct case_node *n;
  unsigned int count;

  count = 0;
  for (n = nodes; n; n = n->right)
    {
      label = label_rtx (n->code_label);
      for (i = 0; i < count; i++)
	if (label == test[i].label)
	  break;

      if (i == count)
	{
	  gcc_assert (count < MAX_CASE_BIT_TESTS);
	  test[i].hi = 0;
	  test[i].lo = 0;
	  test[i].label = label;
	  test[i].bits = 1;
	  count++;
	}
      else
        test[i].bits++;

      lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
				      n->low, minval), 1);
      hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
				      n->high, minval), 1);
      for (j = lo; j <= hi; j++)
        if (j >= HOST_BITS_PER_WIDE_INT)
	  test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
	else
	  test[i].lo |= (HOST_WIDE_INT) 1 << j;
    }

  qsort (test, count, sizeof(*test), case_bit_test_cmp);

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  mode = TYPE_MODE (index_type);
  expr = expand_normal (range);
  emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
			   default_label);

  index = convert_to_mode (word_mode, index, 0);
  index = expand_binop (word_mode, ashl_optab, const1_rtx,
			index, NULL_RTX, 1, OPTAB_WIDEN);

  for (i = 0; i < count; i++)
    {
      expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
      expr = expand_binop (word_mode, and_optab, index, expr,
			   NULL_RTX, 1, OPTAB_WIDEN);
      emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
			       word_mode, 1, test[i].label);
    }

  emit_jump (default_label);
}
#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif

/* Terminate a case (Pascal/Ada) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.  */

void
expand_case (tree exp)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i, fail;
  rtx before_case, end, lab;

  tree vec = SWITCH_LABELS (exp);
  tree orig_type = TREE_TYPE (exp);
  tree index_expr = SWITCH_COND (exp);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl;

  /* The switch body is lowered in gimplify.c; we should never have
     switches with a non-NULL SWITCH_BODY here.  */
  gcc_assert (!SWITCH_BODY (exp));
  gcc_assert (SWITCH_LABELS (exp));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;

      /* cleanup_tree_cfg removes all SWITCH_EXPRs whose index
	 expression is an INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case is at the end of TREE_VEC.  */
      elt = TREE_VEC_ELT (vec, TREE_VEC_LENGTH (vec) - 1);
      gcc_assert (!CASE_HIGH (elt));
      gcc_assert (!CASE_LOW (elt));
      default_label_decl = CASE_LABEL (elt);

      for (i = TREE_VEC_LENGTH (vec) - 1; --i >= 0; )
	{
	  tree low, high;
	  elt = TREE_VEC_ELT (vec, i);

	  low = CASE_LOW (elt);
	  gcc_assert (low);
	  high = CASE_HIGH (elt);

	  /* Discard empty ranges.  */
	  if (high && INT_CST_LT (high, low))
	    continue;

	  case_list = add_case_node (case_list, index_type, low, high,
				     CASE_LABEL (elt));
	}


      before_case = start = get_last_insn ();
      default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
	{
	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* If we have not seen this label yet, then increase the
	     number of unique case node targets seen.  */
	  lab = label_rtx (n->code_label);
	  if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
	    {
	      bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
	      uniq++;
	    }
	}

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPRs with a single
	 destination, such as one with a default case only.  However,
	 it doesn't remove cases that are out of range for the switch
	 type, so we may still get a zero here.  */
      if (count == 0)
	{
	  emit_jump (default_label);
	  return;
	}

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      if (CASE_USE_BIT_TESTS
	  && ! TREE_CONSTANT (index_expr)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0
	  && lshift_cheap_p ()
	  && ((uniq == 1 && count >= 3)
	      || (uniq == 2 && count >= 5)
	      || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = build_int_cst (index_type, 0);
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       case_list, default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */
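
      /* Editorial example, not from the original sources: five cases
	 scattered across the range 0..1000 would need a 1001-entry
	 dispatch table, so this branch builds a compare-and-jump tree
	 instead; five dense cases 0..4 would normally take the
	 casesi/tablejump path below.  */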
      else if (count < case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_size ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || !flag_jump_tables
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi nor tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_normal (index_expr);

	  /* If the index is a short or char for which we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  if (MEM_P (index))
	    index = copy_to_reg (index);

	  /* We generate a binary decision tree to select the
	     appropriate target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */

	  use_cost_table
	    = (TREE_CODE (orig_type) != ENUMERAL_TYPE
	       && estimate_case_costs (case_list));
	  balance_case_nodes (&case_list, NULL);
	  emit_case_nodes (index, case_list, default_label, index_type);
	  emit_jump (default_label);
	}
      else
	{
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label))
	    {
	      bool ok;

	      /* Index jumptables from zero for suitable values of
		 minval to avoid a subtraction.  */
	      if (! optimize_size
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = build_int_cst (index_type, 0);
		  range = maxval;
		}

	      ok = try_tablejump (index_type, index_expr, minval, range,
				  table_label, default_label);
	      gcc_assert (ok);
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = alloca (ncases * sizeof (rtx));
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->low, minval), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->high, minval), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* Record no drop-through after the table.  */
	  emit_barrier ();
	}

      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      fail = squeeze_notes (&before_case, &end);
      gcc_assert (!fail);
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
}

/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.  */

static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
		  int unsignedp)
{
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
			   NULL_RTX, NULL_RTX, label);
}

/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
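
/* Editorial note, not from the original sources: under the weights
   above, a switch over 'a', 'Z' and '0' scores 16 per label, while a
   single case of '\002' makes this function return 0, because that
   control character gets a cost-table entry of -1 below.  */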

static int
estimate_case_costs (case_node_ptr node)
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    COST_TABLE (i) = -1;
	}

      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
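
/* Editorial illustration, not from the original sources: an ordered
   list 1 -> 2 -> 3 -> 4 -> 5 (equal weights) is split at the pivot 3,
   giving

		3
	      /   \
	     1     4
	      \     \
	       2     5

   after the two halves are balanced recursively.  */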

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost.
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */
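
/* Editorial example, not from the original sources: if some parent
   node ends its range at 20 and this node's low bound is 21, the
   parent's test of 20 (== 21 - 1) already guarantees the index is at
   least 21 whenever control reaches this node, so the lower-bound
   test is omitted.  */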

static int
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low,
			       build_int_cst (TREE_TYPE (node->low), 1));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */
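
/* Editorial example, not from the original sources, symmetric to the
   lower-bound case: if some parent node starts its range at
   node->high + 1, control only reaches NODE with an index of at most
   node->high, so the upper-bound test is omitted.  */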

static int
node_has_high_bound (case_node_ptr node, tree index_type)
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high,
			       build_int_cst (TREE_TYPE (node->high), 1));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (case_node_ptr node, tree index_type)
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (mode, index,
			convert_modes (mode, imode,
				       expand_normal (node->low),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->right->low),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->left->low),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if the right child
	     does not have any children and is single valued; it would
	     cost too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
3077 3078
					   convert_modes
					   (mode, imode,
3079
					    expand_normal (node->high),
3080
					    unsignedp),
3081
					   LT, NULL_RTX, mode, unsignedp,
3082
					   default_label);
Richard Kenner committed
3083 3084 3085 3086 3087 3088 3089 3090
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
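	    /* (A single equality jump -- e.g. for a lone "case 5:" leaf,
	       value hypothetical -- is cheaper than the bound check plus
	       recursion that the branch above would emit.)  */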
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->right->low),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->left->low),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */
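      /* (For a range node, the membership test itself takes one ordered
	 comparison per bound; below, those same comparisons double as
	 the tests that split off the subtrees.)  */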

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_normal (node->high),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */
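	  /* (Everything above node->high was dispatched by the GT test
	     just emitted, so a single GE test against node->low decides
	     membership in this node's range.)  */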

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->high),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  At most one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
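	  /* (For instance, if this leaf covers the largest case values
	     and node->high reaches the top of the index range,
	     node_has_high_bound is true and only the low-bound check
	     below is emitted.)  */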
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
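	      /* Worked example (numbers hypothetical): for the range
		 10 ... 20 and index 5, the unsigned subtraction 5 - 10
		 wraps to a huge value, so the single unsigned test
		 against 20 - 10 == 10 sends values below the range to
		 the default label just as it does values above it.  */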
	      low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
						    high, low),
				       NULL_RTX, mode, EXPAND_NORMAL);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}