/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   The functions whose names start with `expand_' are called by the
   expander to generate RTL instructions for various kinds of constructs.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"
#include "alloc-pool.h"

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */
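
/* An illustrative sketch (not from the original sources): for

     switch (i) { case 1: ... case 2: ... case 3: ... default: ... }

   the values 1..3 are dense, so a branch table would be used; a sparse
   set such as 1, 100, 10000 would instead become a balanced tree of
   compare-and-jump insns; and a handful of small values tested against
   one word can use the bit-test expansion in emit_case_bit_tests.  */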

struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
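
/* For example, COST_TABLE (-1) refers to cost_table_[0], so the table
   covers the index range -1 .. 127.  */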

static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static int estimate_case_costs (case_node_ptr);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *add_case_node (struct case_node *, tree,
                                        tree, tree, tree, alloc_pool);

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  gcc_assert (function);

  if (function != current_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
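
/* This expands the GNU C computed goto extension; illustratively,

     void *p = &&lab;  goto *p;

   reaches here with EXP being the tree for `p', which is converted
   to Pmode and emitted as an indirect jump.  */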

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
				ggc_strdup (TREE_STRING_POINTER (string)),
				locus);

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
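
/* A basic asm with no operands, e.g.

     asm volatile ("nop");

   comes through here: the whole template becomes one ASM_INPUT body
   whose MEM_VOLATILE_P records VOL.  */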

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning (0, "output constraint %qc for operand %d "
		 "is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned "
	       "%<+%> or %<=%>");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
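
/* Illustration of the parse above: for an operand constraint "+rm",
   *IS_INOUT is set and the string is canonicalized to "=rm", with
   both *ALLOWS_REG and *ALLOWS_MEM ending up true; a misplaced
   modifier as in "a=r" is first rewritten to "=ar".  */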

/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains %qc", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation %qc in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}
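
/* Illustration: in asm ("..." : "=r" (x) : "0" (y)) the input
   constraint "0" matches output operand 0; being the only
   alternative, it is replaced above by the real constraint "=r",
   whose `r' then sets *ALLOWS_REG.  */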

/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
   can be an asm-declared register.  Called via walk_tree.  */

static tree
decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees,
			      void *data)
{
  tree decl = *declp;
  const HARD_REG_SET *regs = data;

  if (TREE_CODE (decl) == VAR_DECL)
    {
      if (DECL_HARD_REGISTER (decl)
	  && REG_P (DECL_RTL (decl))
	  && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
	{
	  rtx reg = DECL_RTL (decl);

	  if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
	    return decl;
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* If there is an overlap between *REGS and DECL, return the first overlap
   found.  */
tree
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
{
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
}

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return TRUE for a
   conflict, FALSE for ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qs conflicts with asm clobber list",
	     IDENTIFIER_POINTER (DECL_NAME (overlap)));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
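
/* The conflict detected above arises from code like

     register int x asm ("r0");
     asm ("..." : "=g" (x) : : "r0");

   where the variable's asm-declared hard register also appears in
   the clobber list ("r0" here is illustrative only).  */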

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname;

      if (TREE_VALUE (tail) == error_mark_node)
	return;
      regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name %qs in %<asm%>", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
        {
	  /* Clobbering the PIC register is an error.  */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register %qs clobbered in %<asm%>", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && REG_P (DECL_RTL (val))
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	lang_hooks.mark_addressable (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	lang_hooks.mark_addressable (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;
      bool ok;

      ok = parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || REG_P (DECL_RTL (val)))
	      && ! (REG_P (DECL_RTL (val))
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (MEM_P (op))
	    op = validize_mem (op);

	  if (! allows_reg && !MEM_P (op))
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && MEM_P (op))
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = op;
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       ggc_strdup (TREE_STRING_POINTER (string)),
			       empty_string, 0, argvec, constraintvec,
			       locus);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;
      bool ok;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				   constraints, &allows_mem, &allows_reg);
      gcc_assert (ok);

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      /* EXPAND_INITIALIZER will not generate code for valid initializer
	 constants, but will still generate code for other types of operand.
	 This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
			allows_reg ? EXPAND_NORMAL
			: allows_mem ? EXPAND_MEMORY
			: EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg && TYPE_MODE (type) != BLKmode)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning (0, "asm operand %d probably doesn%'t match constraints",
		     i + noutputs);
	  else if (MEM_P (op))
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as available a memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    {
	      warning (0, "use of memory input without lvalue in "
		       "asm operand %d is deprecated", i + noutputs);

	      if (CONSTANT_P (op))
		{
		  rtx mem = force_const_mem (TYPE_MODE (type), op);
		  if (mem)
		    op = validize_mem (mem);
		  else
		    op = force_reg (TYPE_MODE (type), op);
		}
	      if (REG_P (op)
		  || GET_CODE (op) == SUBREG
		  || GET_CODE (op) == CONCAT)
		{
		  tree qual_type = build_qualified_type (type,
							 (TYPE_QUALS (type)
							  | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
		  memloc = validize_mem (memloc);
		  emit_move_insn (memloc, op);
		  op = memloc;
		}
	    }
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (type), 
			     ggc_strdup (constraints[i + noutputs]));

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }

  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
			       ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }

  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }

  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    ggc_strdup (TREE_STRING_POINTER (string)),
			    ggc_strdup (constraints[i]),
			    i, argvec, constraintvec, locus));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);
	  rtx clobbered_reg;

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  clobbered_reg = gen_rtx_REG (QImode, j);

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above.  */
	  if (!clobber_conflict_found)
	    {
	      int opno;

	      /* We test the old body (obody) contents to avoid tripping
		 over the under-construction body.  */
	      for (opno = 0; opno < noutputs; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
		  internal_error ("asm clobber conflict with output operand");

	      for (opno = 0; opno < ninputs - ninout; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg,
					     ASM_OPERANDS_INPUT (obody, opno)))
		  internal_error ("asm clobber conflict with input operand");
	    }

	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	}

      emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  cfun->has_asm_statement = 1;
  free_temp_slots ();
}

void
expand_asm_expr (tree exp)
{
  int noutputs, i;
  tree outputs, tail;
  tree *o;

  if (ASM_INPUT_P (exp))
    {
      expand_asm_loc (ASM_STRING (exp), ASM_VOLATILE_P (exp), input_location);
      return;
    }

  outputs = ASM_OUTPUTS (exp);
  noutputs = list_length (outputs);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
		       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
		       input_location);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
	{
	  expand_assignment (o[i], TREE_VALUE (tail), false);
	  free_temp_slots ();

	  /* Restore the original value so that it's correct the next
	     time we expand this function.  */
	  TREE_VALUE (tail) = o[i];
	}
    }
}
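
/* A typical extended asm handled here looks like (illustrative):

     asm volatile ("add %1,%0" : "=r" (out) : "r" (in));

   ASM_OUTPUTS carries "=r" (out), ASM_INPUTS carries "r" (in), and
   any intermediate output temporaries that expand_asm_operands
   created are copied back to `out' by the loop above.  */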

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for %<asm%> differ "
		     "in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}
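
/* For instance, the operands of

     asm ("..." : "=r,m" (x) : "r,r" (y));

   both have two alternatives, so the check passes; pairing "=r,m"
   with a plain "r" input would be rejected.  */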

/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.  */

static bool
check_unique_operand_names (tree outputs, tree inputs)
{
  tree i, j;

  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name %qs",
	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}

/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
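
/* As an illustration, the statement

     asm ("mov %[in], %[out]" : [out] "=r" (a) : [in] "r" (b));

   is rewritten above so that the template reads "mov %1, %0" before
   the constraints are examined any further.  */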

/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  len = q - p - 1;

  /* Resolve the name to a number.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  *q = '\0';
  error ("undefined named operand %qs", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}

/* Generate RTL to evaluate the expression EXP.  */

void
expand_expr_stmt (tree exp)
{
  rtx value;
  tree type;

  value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
  if (GIMPLE_TUPLE_P (exp))
    type = void_type_node;
  else
    type = TREE_TYPE (exp);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (type) == VOIDmode)
	;
      else if (TYPE_MODE (type) != BLKmode)
	value = copy_to_reg (value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (value, value, EQ,
				   expand_normal (TYPE_SIZE (type)),
				   BLKmode, 0, lab);
	  emit_label (lab);
	}
    }

  /* Free any temporaries used to evaluate this expression.  */
  free_temp_slots ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression.  */

int
warn_if_unused_value (const_tree exp, location_t locus)
{
 restart:
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
    case VA_ARG_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
	 case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
	return 0;

    warn:
      warning (OPT_Wunused_value, "%Hvalue computed is not used", &locus);
      return 1;
    }
}
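
/* For example, the statement `x + 1;' computes an unused value and
   reaches the `warn' label above, whereas `(void) (x + 1);' is
   silently accepted via the VOID_TYPE_P check.  */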


/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.)  */

void
expand_naked_return (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  end_label = naked_return_label;
  if (end_label == 0)
    end_label = naked_return_label = gen_label_rtx ();

  emit_jump (end_label);
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location
     unless it's already there.  */

  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
	{
	  int unsignedp = TYPE_UNSIGNED (type);
	  enum machine_mode old_mode
	    = DECL_MODE (DECL_RESULT (current_function_decl));
	  enum machine_mode mode
	    = promote_mode (type, old_mode, &unsignedp, 1);

	  if (mode != old_mode)
	    val = convert_modes (mode, old_mode, val, unsignedp);
	}
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == GIMPLE_MODIFY_STMT
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (GENERIC_TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = GENERIC_TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (GENERIC_TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
1614
      unsigned HOST_WIDE_INT padding_correction = 0;
1615 1616
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
1617
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1618 1619
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1620
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
1621
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1622
      rtx result_val = expand_normal (retval_rhs);
1623
      enum machine_mode tmpmode, result_reg_mode;
1624

1625 1626 1627 1628 1629 1630
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

1631 1632 1633 1634 1635 1636 1637 1638 1639 1640 1641 1642 1643 1644 1645 1646
      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));
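
      /* Editor's illustration (not from the original sources): for a
	 3-byte structure on a 32-bit target that pads on the left,
	 bytes % UNITS_PER_WORD == 3, so PADDING_CORRECTION is
	 32 - 3 * 8 == 8 bits of padding on the left side.  */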

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
         reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}

/* Given a pointer to a BLOCK node, return nonzero if (and only if) the node
   in question represents the outermost pair of curly braces (i.e. the "body
   block") of a function or method.

   For any BLOCK node representing a "body block" of a function or method, the
   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
   represents the outermost (function) scope for the function or method (i.e.
   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
   *that* node in turn will point to the relevant FUNCTION_DECL node.  */

int
is_body_block (const_tree stmt)
{
  if (lang_hooks.no_body_blocks)
    return 0;

  if (TREE_CODE (stmt) == BLOCK)
    {
      tree parent = BLOCK_SUPERCONTEXT (stmt);

      if (parent && TREE_CODE (parent) == BLOCK)
	{
	  tree grandparent = BLOCK_SUPERCONTEXT (parent);

	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
	    return 1;
	}
    }

  return 0;
}
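
/* Editor's illustration (not from the original sources): for
   "int f (int x) { int y; ... }" the nesting is

     FUNCTION_DECL f
       -> BLOCK (outermost function scope, includes the parameter X)
	  -> BLOCK (the "body block", containing Y)

   so a body block is recognized by having a BLOCK parent whose own
   supercontext is a FUNCTION_DECL.  */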

/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  */
static void
expand_nl_goto_receiver (void)
{
  /* We clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	  mark_user_reg (DECL_RTL (decl));

      if (POINTER_TYPE_P (type))
	mark_reg_pointer (DECL_RTL (decl),
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
					  STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  gcc_assert (MEM_P (DECL_RTL (decl)));
	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if we have
	 not already done so.  */
      do_pending_stack_adjust ();

      /* Compute the variable's size, in bytes.  This will expand any
	 needed SAVE_EXPRs for the first time.  */
      size = expand_normal (DECL_SIZE_UNIT (decl));
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirectly through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}

/* Emit code to save the current value of the stack pointer.  */
rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
  return ret;
}

/* Emit code to restore a previously saved stack pointer value.  */
void
expand_stack_restore (tree var)
{
  rtx sa = expand_normal (var);

  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
}
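
/* Editor's note (an illustrative assumption, not from the original
   sources): these two routines back the __builtin_stack_save and
   __builtin_stack_restore pairs that gimplification wraps around
   variable-length-array scopes, e.g.

     void f (int n) { { int a[n]; use (a); } more_code (); }

   so that the stack space for A is released before MORE_CODE runs.  */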

/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
			tree decl_elts)
{
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  expand_decl (decl);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
      rtx decl_rtl;

      /* If any of the elements are used, so is the entire union.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      if (mode == GET_MODE (x))
	decl_rtl = x;
      else if (MEM_P (x))
        /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
           instead create a new MEM rtx with the proper mode.  */
	decl_rtl = adjust_address_nv (x, mode, 0);
      else
	{
	  gcc_assert (REG_P (x));
	  decl_rtl = gen_lowpart_SUBREG (mode, x);
	}
      SET_DECL_RTL (decl_elt, decl_rtl);
    }
}

/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
   are converted to the case's index type TYPE.  */

static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
               tree label, alloc_pool case_node_pool)
{
  tree min_value, max_value;
  struct case_node *r;

  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (high, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;

      /* If the lower bound is less than the index type's minimum
	 value, truncate the range bounds.  */
      if (TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	low = min_value;
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
	 value, truncate the range bounds.  */
      if (TREE_CODE (max_value) == INTEGER_CST
	  && tree_int_cst_compare (high, max_value) > 0)
	high = max_value;
      high = fold_convert (type, high);
    }

  /* Add this label to the chain.  Make sure to drop overflow flags.  */
  r = (struct case_node *) pool_alloc (case_node_pool);
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
			       TREE_INT_CST_HIGH (low));
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
				TREE_INT_CST_HIGH (high));
  r->code_label = label;
  r->parent = r->left = NULL;
  r->right = head;
  return r;
}
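
/* Editor's example (illustrative only): with an "unsigned char" index
   type, "case 250 ... 300:" is truncated above to the range 250 ... 255,
   while a lone "case 300:" is dropped entirely as unreachable.  */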

/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (optab_handler (ashl_optab, word_mode)->insn_code \
			     != CODE_FOR_nothing)
#endif


/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;
  HOST_WIDE_INT lo;
  rtx label;
  int bits;
};

/* Determine whether "1 << x" is relatively cheap in word_mode.  */

static bool
lshift_cheap_p (void)
{
  static bool init = false;
  static bool cheap = true;

  if (!init)
    {
      rtx reg = gen_rtx_REG (word_mode, 10000);
      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
      cheap = cost < COSTS_N_INSNS (3);
      init = true;
    }

  return cheap;
}
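
/* Editor's note: register number 10000 above is just an arbitrary
   pseudo register used for costing, and the shift counts as cheap
   when its estimated cost is below that of roughly three simple
   instructions (COSTS_N_INSNS (3)).  */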

/* Comparison function for qsort to order bit tests by decreasing
   number of case nodes, i.e. the node with the most cases gets
   tested first.  */

static int
case_bit_test_cmp (const void *p1, const void *p2)
{
  const struct case_bit_test *d1 = p1;
  const struct case_bit_test *d2 = p2;

  if (d2->bits != d1->bits)
    return d2->bits - d1->bits;

  /* Stabilize the sort.  */
  return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
}

/*  Expand a switch statement by a short sequence of bit-wise
    comparisons.  "switch(x)" is effectively converted into
    "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
    integer constants.

    INDEX_EXPR is the value being switched on, which is of
    type INDEX_TYPE.  MINVAL is the lowest case value in
    the case nodes, of INDEX_TYPE type, and RANGE is the highest
    value minus MINVAL, also of type INDEX_TYPE.  NODES is
    the set of case nodes, and DEFAULT_LABEL is the label to
    branch to should none of the cases match.

    There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
    node targets.  */
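
/* Editor's worked example (not from the original sources): for
   "switch (x) { case 1: case 4: case 6: f (); }" we have MINVAL == 1
   and the case values map to bits 0, 3 and 5, so the dispatch becomes
   roughly "if ((1 << (x - 1)) & 0x29) goto <f's label>;" where
   0x29 == binary 101001.  */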

static void
emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
		     tree range, case_node_ptr nodes, rtx default_label)
{
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
  enum machine_mode mode;
  rtx expr, index, label;
  unsigned int i, j, lo, hi;
  struct case_node *n;
  unsigned int count;

  count = 0;
  for (n = nodes; n; n = n->right)
    {
      label = label_rtx (n->code_label);
      for (i = 0; i < count; i++)
	if (label == test[i].label)
	  break;

      if (i == count)
	{
	  gcc_assert (count < MAX_CASE_BIT_TESTS);
	  test[i].hi = 0;
	  test[i].lo = 0;
	  test[i].label = label;
	  test[i].bits = 1;
	  count++;
	}
      else
        test[i].bits++;

      lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
				      n->low, minval), 1);
      hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
				      n->high, minval), 1);
      for (j = lo; j <= hi; j++)
        if (j >= HOST_BITS_PER_WIDE_INT)
	  test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
	else
	  test[i].lo |= (HOST_WIDE_INT) 1 << j;
    }

  qsort (test, count, sizeof(*test), case_bit_test_cmp);

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  mode = TYPE_MODE (index_type);
  expr = expand_normal (range);
  emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
			   default_label);

  index = convert_to_mode (word_mode, index, 0);
  index = expand_binop (word_mode, ashl_optab, const1_rtx,
			index, NULL_RTX, 1, OPTAB_WIDEN);

  for (i = 0; i < count; i++)
    {
      expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
      expr = expand_binop (word_mode, and_optab, index, expr,
			   NULL_RTX, 1, OPTAB_WIDEN);
      emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
			       word_mode, 1, test[i].label);
    }

  emit_jump (default_label);
}

#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif

/* Terminate a case (Pascal/Ada) or switch (C) statement, in which
   EXP is the SWITCH_EXPR to be expanded; its SWITCH_COND is the
   expression to be tested.  Generate the code to test it and jump
   to the right place.  */

void
expand_case (tree exp)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;

  tree vec = SWITCH_LABELS (exp);
  tree orig_type = TREE_TYPE (exp);
  tree index_expr = SWITCH_COND (exp);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl;

  alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
                                                 sizeof (struct case_node),
                                                 100);

  /* The switch body is lowered in gimplify.c, so we should never have
     switches with a non-NULL SWITCH_BODY here.  */
  gcc_assert (!SWITCH_BODY (exp));
  gcc_assert (SWITCH_LABELS (exp));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;

      /* cleanup_tree_cfg removes all SWITCH_EXPRs whose index
	 expression is an INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case is at the end of TREE_VEC.  */
      elt = TREE_VEC_ELT (vec, TREE_VEC_LENGTH (vec) - 1);
      gcc_assert (!CASE_HIGH (elt));
      gcc_assert (!CASE_LOW (elt));
      default_label_decl = CASE_LABEL (elt);

      for (i = TREE_VEC_LENGTH (vec) - 1; --i >= 0; )
	{
	  tree low, high;
	  elt = TREE_VEC_ELT (vec, i);

	  low = CASE_LOW (elt);
	  gcc_assert (low);
	  high = CASE_HIGH (elt);

	  /* Discard empty ranges.  */
	  if (high && tree_int_cst_lt (high, low))
	    continue;

	  case_list = add_case_node (case_list, index_type, low, high,
                                     CASE_LABEL (elt), case_node_pool);
	}

      before_case = start = get_last_insn ();
      default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
	{
	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (tree_int_cst_lt (n->low, minval))
		minval = n->low;
	      if (tree_int_cst_lt (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* If we have not seen this label yet, then increase the
	     number of unique case node targets seen.  */
	  lab = label_rtx (n->code_label);
	  if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
	    {
	      bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
	      uniq++;
	    }
	}

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPRs with a single
	 destination, such as one with a default case only.  However,
	 it doesn't remove cases that are out of range for the switch
	 type, so we may still get a zero here.  */
      if (count == 0)
	{
	  emit_jump (default_label);
          free_alloc_pool (case_node_pool);
	  return;
	}

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      if (CASE_USE_BIT_TESTS
	  && ! TREE_CONSTANT (index_expr)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0
	  && lshift_cheap_p ()
	  && ((uniq == 1 && count >= 3)
	      || (uniq == 2 && count >= 5)
	      || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = build_int_cst (index_type, 0);
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       case_list, default_label);
	}

      /* If the range of values is much bigger than the number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_size ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || !flag_jump_tables
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi nor tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_normal (index_expr);

	  /* If the index is a short or char for which we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  if (MEM_P (index))
	    index = copy_to_reg (index);

	  /* We generate a binary decision tree to select the
	     appropriate target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */

	  use_cost_table
	    = (TREE_CODE (orig_type) != ENUMERAL_TYPE
	       && estimate_case_costs (case_list));
	  balance_case_nodes (&case_list, NULL);
	  emit_case_nodes (index, case_list, default_label, index_type);
	  emit_jump (default_label);
	}
      else
	{
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label))
	    {
	      bool ok;

	      /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
	      if (! optimize_size
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = build_int_cst (index_type, 0);
		  range = maxval;
		}

	      ok = try_tablejump (index_type, index_expr, minval, range,
				  table_label, default_label);
	      gcc_assert (ok);
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = alloca (ncases * sizeof (rtx));
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->low, minval), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->high, minval), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* Record no drop-through after the table.  */
	  emit_barrier ();
	}

      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
  free_alloc_pool (case_node_pool);
}

/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.  */

static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
		  int unsignedp)
{
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
			   NULL_RTX, NULL_RTX, label);
}

/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (case_node_ptr node)
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    COST_TABLE (i) = -1;
	}

      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if (tree_int_cst_lt (n->low, min_ascii)
	  || tree_int_cst_lt (max_ascii, n->high))
	return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
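
/* Editor's example (illustrative only): with uniform costs, the list
   1 -> 2 -> 3 -> 4 -> 5 is split at the pivot 3; the sublists {1, 2}
   and {4, 5} become the pivot's left and right branches and are then
   balanced recursively.  */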

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost; here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */
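
/* Editor's example (illustrative only): if an ancestor node's range
   ends at 50 and control reaches the current node only when the index
   is greater, a node whose low bound is 51 needs no lower-bound test;
   this is detected below by finding a parent whose HIGH equals the
   current node's LOW minus one.  */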

static int
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low,
			       build_int_cst (TREE_TYPE (node->low), 1));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
2856
node_has_high_bound (case_node_ptr node, tree index_type)
Richard Kenner committed
2857 2858 2859 2860
{
  tree high_plus_one;
  case_node_ptr pnode;

2861 2862 2863 2864 2865
  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

Richard Kenner committed
2866 2867 2868 2869 2870 2871 2872 2873 2874 2875 2876 2877 2878
  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

2879
  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2880 2881
			       node->high,
			       build_int_cst (TREE_TYPE (node->high), 1));
Richard Kenner committed
2882 2883 2884 2885 2886 2887 2888 2889 2890 2891 2892 2893 2894 2895 2896 2897 2898 2899 2900

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (case_node_ptr node, tree index_type)
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (mode, index,
			convert_modes (mode, imode,
				       expand_normal (node->low),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Neither node is bounded.  Handle each single-valued
		 child with a direct equality test.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->right->low),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->left->low),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If the left-hand subtree code falls through (no case
		 matched), go to the default label.  */
	      emit_jump (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
3057
	  /* Here we have a right child but no left so we issue a conditional
Richard Kenner committed
3058 3059
	     branch to default and process the right child.

3060 3061 3062
	     Omit the conditional branch to default if the right child
	     does not have any children and is single valued; it would
	     cost too much space to save so little time.  */
Richard Kenner committed
3063

3064
	  if (node->right->right || node->right->left
Richard Kenner committed
3065 3066 3067 3068
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->right->low),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
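	  /* This is the mirror of the right-child case above: branch to
	     default for values above this node's range, then process the
	     left subtree.  */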
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->left->low),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;
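	  /* For a range node such as "case 5 ... 9:", the sequence is:
	       if (index > 9) goto right subtree's label (or test_label);
	       if (index >= 5) goto this node's label;
	       ...left subtree tests...  */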

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_normal (node->high),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->high),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  At most one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
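	  /* For instance, if an ancestor's tests already guarantee
	     index >= low, only "if (index > high) goto default" remains
	     before the unconditional jump to this node's label.  */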
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
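	      /* For example, for "case 3 ... 7:" this emits a single
		 unsigned test (index - 3) > 4: values below 3 wrap
		 around to large unsigned numbers, and values above 7
		 exceed 4 directly, so both ends go to default.  */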
	      low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
						    high, low),
				       NULL_RTX, mode, EXPAND_NORMAL);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}