/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   The functions whose names start with `expand_' are called by the
   expander to generate RTL instructions for various kinds of constructs.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "recog.h"
#include "machmode.h"
#include "diagnostic-core.h"
#include "output.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "hash-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "regs.h"
#include "alloc-pool.h"
#include "pretty-print.h"
#include "params.h"
#include "dumpfile.h"
#include "builtins.h"


/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */
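
/* Purely as an illustration (an editorial addition, not part of the
   original sources): a switch whose case values densely cover a small
   range, say

     switch (c) { case 0: case 1: case 2: (and so on up to) case 63: ... }

   is a natural candidate for a branch table, whereas a sparse switch such as

     switch (x) { case 1: case 1000: case 1000000: ... }

   would be expanded as a balanced tree of compare-and-jump insns.  The
   heuristic that chooses between the two forms is
   expand_switch_as_decision_tree_p, further down in this file.  */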

struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int                   prob; /* Probability of taking this case.  */
  /* Probability of reaching subtree rooted at this node */
  int                   subtree_prob;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

extern basic_block label_to_block_fn (struct function *, tree);

static bool check_unique_operand_names (tree, tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree, tree);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, int, tree);

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);

  gcc_assert (function);

  forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_builtin_setjmp_receiver (NULL);
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning (0, "output constraint %qc for operand %d "
		 "is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = XALLOCAVEC (char, c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned "
	       "%<+%> or %<=%>");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	enum constraint_num cn = lookup_constraint (p);
	if (reg_class_for_constraint (cn) != NO_REGS
	    || insn_extra_address_constraint (cn))
	  *allows_reg = true;
	else if (insn_extra_memory_constraint (cn))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
	break;
      }

  return true;
}
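
/* Illustrative example (an editorial addition, not part of the original
   sources): for a GNU extended asm such as

     int x;
     asm ("incl %0" : "+r" (x));

   the "+r" output constraint is passed to parse_output_constraint above,
   which canonicalizes it to "=r", sets *IS_INOUT and *ALLOWS_REG to true,
   and leaves *ALLOWS_MEM false.  */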

/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains %qc", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation %qc in constraint", constraint[j]);
	    return false;
	  }
	enum constraint_num cn = lookup_constraint (constraint + j);
	if (reg_class_for_constraint (cn) != NO_REGS
	    || insn_extra_address_constraint (cn))
	  *allows_reg = true;
	else if (insn_extra_memory_constraint (cn))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
	break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}

/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
   can be an asm-declared register.  Called via walk_tree.  */

static tree
decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
			      void *data)
{
  tree decl = *declp;
  const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;

  if (TREE_CODE (decl) == VAR_DECL)
    {
      if (DECL_HARD_REGISTER (decl)
	  && REG_P (DECL_RTL (decl))
	  && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
	{
	  rtx reg = DECL_RTL (decl);

	  if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
	    return decl;
	}
      walk_subtrees = 0;
    }
  else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
    walk_subtrees = 0;
  return NULL_TREE;
}

/* If there is an overlap between *REGS and DECL, return the first overlap
   found.  */
tree
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
{
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
}


/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.  */

static bool
check_unique_operand_names (tree outputs, tree inputs, tree labels)
{
  tree i, j, i_name = NULL_TREE;

  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  for (i = labels; i ; i = TREE_CHAIN (i))
    {
      i_name = TREE_PURPOSE (i);
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
	  goto failure;
      for (j = inputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name));
  return false;
}

/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  check_unique_operand_names (outputs, inputs, labels);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs, NULL);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1 + (c[1] == '%');
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1 + (p[1] == '%');
	      continue;
	    }

	  p = resolve_operand_name_1 (p, outputs, inputs, labels);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
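
/* Illustrative example (an editorial addition, not part of the original
   sources): given

     asm ("mov %[src], %[dst]" : [dst] "=r" (x) : [src] "r" (y));

   resolve_asm_operand_names rewrites the template to "mov %1, %0", since
   outputs are numbered before inputs; any [<name>] references inside the
   input constraint strings are resolved the same way via
   resolve_operand_name_1 below.  */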

/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
{
  char *q;
  int op;
  tree t;

  /* Collect the operand name.  */
  q = strchr (++p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  *q = '\0';

  /* Resolve the name to a number.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
	goto found;
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
	goto found;
    }
  for (t = labels; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (t);
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
	goto found;
    }

  error ("undefined named operand %qs", identifier_to_locale (p));
  op = 0;

 found:
  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (--p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}


/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.)  */

void
expand_naked_return (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  end_label = naked_return_label;
  if (end_label == 0)
    end_label = naked_return_label = gen_label_rtx ();

  emit_jump (end_label);
}

/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. PROB
   is the probability of jumping to LABEL.  */
static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
		  int unsignedp, int prob)
{
  gcc_assert (prob <= REG_BR_PROB_BASE);
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
			   NULL_RTX, NULL_RTX, label, prob);
}

/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.
   
   LABEL is the case label to be inserted. LOW and HIGH are the bounds
   against which the index is compared to jump to LABEL and PROB is the
   estimated probability LABEL is reached from the switch statement.  */

static struct case_node *
add_case_node (struct case_node *head, tree low, tree high,
               tree label, int prob, alloc_pool case_node_pool)
{
  struct case_node *r;

  gcc_checking_assert (low);
  gcc_checking_assert (high && (TREE_TYPE (low) == TREE_TYPE (high)));

  /* Add this label to the chain.  */
  r = (struct case_node *) pool_alloc (case_node_pool);
  r->low = low;
  r->high = high;
  r->code_label = label;
  r->parent = r->left = NULL;
  r->prob = prob;
  r->subtree_prob = prob;
  r->right = head;
  return r;
}

/* Dump ROOT, a list or tree of case nodes, to file.  */

static void
dump_case_nodes (FILE *f, struct case_node *root,
		 int indent_step, int indent_level)
{
  if (root == 0)
    return;
  indent_level++;

  dump_case_nodes (f, root->left, indent_step, indent_level);

  fputs (";; ", f);
  fprintf (f, "%*s", indent_step * indent_level, "");
  print_dec (root->low, f, TYPE_SIGN (TREE_TYPE (root->low)));
  if (!tree_int_cst_equal (root->low, root->high))
    {
      fprintf (f, " ... ");
      print_dec (root->high, f, TYPE_SIGN (TREE_TYPE (root->high)));
    }
  fputs ("\n", f);

  dump_case_nodes (f, root->right, indent_step, indent_level);
}

#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif

/* Return the smallest number of different values for which it is best to use a
   jump-table instead of a tree of conditional branches.  */

static unsigned int
case_values_threshold (void)
{
  unsigned int threshold = PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD);

  if (threshold == 0)
    threshold = targetm.case_values_threshold ();

  return threshold;
}

/* Return true if a switch should be expanded as a decision tree.
   RANGE is the difference between highest and lowest case.
   UNIQ is number of unique case node targets, not counting the default case.
   COUNT is the number of comparisons needed, not counting the default case.  */

static bool
expand_switch_as_decision_tree_p (tree range,
				  unsigned int uniq ATTRIBUTE_UNUSED,
				  unsigned int count)
{
  int max_ratio;

  /* If neither casesi or tablejump is available, or flag_jump_tables
     over-ruled us, we really have no choice.  */
  if (!HAVE_casesi && !HAVE_tablejump)
    return true;
  if (!flag_jump_tables)
    return true;
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
  if (flag_pic)
    return true;
#endif

  /* If the switch is relatively small such that the cost of one
     indirect jump on the target are higher than the cost of a
     decision tree, go with the decision tree.

     If range of values is much bigger than number of values,
     or if it is too large to represent in a HOST_WIDE_INT,
     make a sequence of conditional branches instead of a dispatch.

     The definition of "much bigger" depends on whether we are
     optimizing for size or for speed.  If the former, the maximum
     ratio range/count = 3, because this was found to be the optimal
     ratio for size on i686-pc-linux-gnu, see PR11823.  The ratio
     10 is much older, and was probably selected after an extensive
     benchmarking investigation on numerous platforms.  Or maybe it
     just made sense to someone at some point in the history of GCC,
     who knows...  */
  max_ratio = optimize_insn_for_size_p () ? 3 : 10;
  if (count < case_values_threshold ()
      || ! tree_fits_uhwi_p (range)
      || compare_tree_int (range, max_ratio * count) > 0)
    return true;

  return false;
}
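
/* Worked example (an editorial addition, not part of the original
   sources): for a switch needing 8 comparisons (COUNT == 8) whose case
   values span 0 .. 100 (RANGE == 100), and assuming the target's
   case-values threshold (typically 4 or 5) is below 8, optimizing for
   speed gives max_ratio == 10, so compare_tree_int (range, 10 * 8) > 0
   holds (100 > 80) and the switch is expanded as a decision tree rather
   than as a jump table.  */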

/* Generate a decision tree, switching on INDEX_EXPR and jumping to
   one of the labels in CASE_LIST or to the DEFAULT_LABEL.
   DEFAULT_PROB is the estimated probability that it jumps to
   DEFAULT_LABEL.
   
   We generate a binary decision tree to select the appropriate target
   code.  This is done as follows:

     If the index is a short or char that we do not have
     an insn to handle comparisons directly, convert it to
     a full integer now, rather than letting each comparison
     generate the conversion.

     Load the index into a register.

     The list of cases is rearranged into a binary tree,
     nearly optimal assuming equal probability for each case.

     The tree is transformed into RTL, eliminating redundant
     test conditions at the same time.

     If program flow could reach the end of the decision tree
     an unconditional jump to the default code is emitted.

   The above process is unaware of the CFG.  The caller has to fix up
   the CFG itself.  This is done in cfgexpand.c.  */     

static void
emit_case_decision_tree (tree index_expr, tree index_type,
			 struct case_node *case_list, rtx default_label,
                         int default_prob)
{
  rtx index = expand_normal (index_expr);

  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
      && ! have_insn_for (COMPARE, GET_MODE (index)))
    {
      int unsignedp = TYPE_UNSIGNED (index_type);
      enum machine_mode wider_mode;
      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
	   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
	if (have_insn_for (COMPARE, wider_mode))
	  {
	    index = convert_to_mode (wider_mode, index, unsignedp);
	    break;
	  }
    }

  do_pending_stack_adjust ();

  if (MEM_P (index))
    {
      index = copy_to_reg (index);
      if (TREE_CODE (index_expr) == SSA_NAME)
	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (index_expr), index);
    }

  balance_case_nodes (&case_list, NULL);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
      fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
      dump_case_nodes (dump_file, case_list, indent_step, 0);
    }

  emit_case_nodes (index, case_list, default_label, default_prob, index_type);
  if (default_label)
    emit_jump (default_label);
}

/* Return the sum of probabilities of outgoing edges of basic block BB.  */

static int
get_outgoing_edge_probs (basic_block bb)
{
  edge e;
  edge_iterator ei;
  int prob_sum = 0;
  if (!bb)
    return 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    prob_sum += e->probability;
  return prob_sum;
}

/* Computes the conditional probability of jumping to a target if the branch
   instruction is executed.
   TARGET_PROB is the estimated probability of jumping to a target relative
   to some basic block BB.
   BASE_PROB is the probability of reaching the branch instruction relative
   to the same basic block BB.  */

static inline int
conditional_probability (int target_prob, int base_prob)
{
  if (base_prob > 0)
    {
      gcc_assert (target_prob >= 0);
      gcc_assert (target_prob <= base_prob);
      return GCOV_COMPUTE_SCALE (target_prob, base_prob);
    }
  return -1;
}
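
/* Worked example (an editorial addition, not part of the original
   sources): REG_BR_PROB_BASE is 10000, so for TARGET_PROB == 2500 and
   BASE_PROB == 5000 this returns GCOV_COMPUTE_SCALE (2500, 5000) == 5000,
   i.e. a conditional probability of 50%.  A non-positive BASE_PROB yields
   -1, meaning the probability is unknown.  */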

/* Generate a dispatch table, switching on INDEX_EXPR and jumping to
   one of the labels in CASE_LIST or to the DEFAULT_LABEL.
   MINVAL, MAXVAL, and RANGE are the extrema and range of the case
   labels in CASE_LIST. STMT_BB is the basic block containing the statement.

   First, a jump insn is emitted.  We try "casesi" first; if that fails,
   we try "tablejump".  A target *must* have one of them (or both).

   Then, a table with the target labels is emitted.

   The process is unaware of the CFG.  The caller has to fix up
   the CFG itself.  This is done in cfgexpand.c.  */     

static void
emit_case_dispatch_table (tree index_expr, tree index_type,
			  struct case_node *case_list, rtx default_label,
			  tree minval, tree maxval, tree range,
                          basic_block stmt_bb)
{
  int i, ncases;
  struct case_node *n;
  rtx *labelvec;
  rtx fallback_label = label_rtx (case_list->code_label);
  rtx table_label = gen_label_rtx ();
  bool has_gaps = false;
  edge default_edge = stmt_bb ? EDGE_SUCC (stmt_bb, 0) : NULL;
  int default_prob = default_edge ? default_edge->probability : 0;
  int base = get_outgoing_edge_probs (stmt_bb);
  bool try_with_tablejump = false;

  int new_default_prob = conditional_probability (default_prob,
                                                  base);

  if (! try_casesi (index_type, index_expr, minval, range,
		    table_label, default_label, fallback_label,
                    new_default_prob))
    {
      /* Index jumptables from zero for suitable values of minval to avoid
	 a subtraction.  For the rationale see:
	 "http://gcc.gnu.org/ml/gcc-patches/2001-10/msg01234.html".  */
      if (optimize_insn_for_speed_p ()
	  && compare_tree_int (minval, 0) > 0
	  && compare_tree_int (minval, 3) < 0)
	{
	  minval = build_int_cst (index_type, 0);
	  range = maxval;
          has_gaps = true;
	}
      try_with_tablejump = true;
    }

  /* Get table of labels to jump to, in order of case index.  */

  ncases = tree_to_shwi (range) + 1;
  labelvec = XALLOCAVEC (rtx, ncases);
  memset (labelvec, 0, ncases * sizeof (rtx));

  for (n = case_list; n; n = n->right)
    {
      /* Compute the low and high bounds relative to the minimum
	 value since that should fit in a HOST_WIDE_INT while the
	 actual values may not.  */
      HOST_WIDE_INT i_low
	= tree_to_uhwi (fold_build2 (MINUS_EXPR, index_type,
				     n->low, minval));
      HOST_WIDE_INT i_high
	= tree_to_uhwi (fold_build2 (MINUS_EXPR, index_type,
				     n->high, minval));
      HOST_WIDE_INT i;

      for (i = i_low; i <= i_high; i ++)
	labelvec[i]
	  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
    }

  /* Fill in the gaps with the default.  We may have gaps at
     the beginning if we tried to avoid the minval subtraction,
     so substitute some label even if the default label was
     deemed unreachable.  */
  if (!default_label)
    default_label = fallback_label;
  for (i = 0; i < ncases; i++)
    if (labelvec[i] == 0)
      {
        has_gaps = true;
        labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
      }

  if (has_gaps)
    {
      /* There is at least one entry in the jump table that jumps
         to default label. The default label can either be reached
         through the indirect jump or the direct conditional jump
         before that. Split the probability of reaching the
         default label among these two jumps.  */
      new_default_prob = conditional_probability (default_prob/2,
                                                  base);
      default_prob /= 2;
      base -= default_prob;
    }
  else
    {
      base -= default_prob;
      default_prob = 0;
    }

  if (default_edge)
    default_edge->probability = default_prob;

  /* We have altered the probability of the default edge. So the probabilities
     of all other edges need to be adjusted so that it sums up to
     REG_BR_PROB_BASE.  */
  if (base)
    {
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, stmt_bb->succs)
        e->probability = GCOV_COMPUTE_SCALE (e->probability,  base);
    }

  if (try_with_tablejump)
    {
      bool ok = try_tablejump (index_type, index_expr, minval, range,
                               table_label, default_label, new_default_prob);
      gcc_assert (ok);
    }
  /* Output the table.  */
  emit_label (table_label);

  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
    emit_jump_table_data (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						 gen_rtx_LABEL_REF (Pmode,
								    table_label),
						 gen_rtvec_v (ncases, labelvec),
						 const0_rtx, const0_rtx));
  else
    emit_jump_table_data (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					    gen_rtvec_v (ncases, labelvec)));

  /* Record no drop-through after the table.  */
  emit_barrier ();
}

/* Reset the aux field of all outgoing edges of basic block BB.  */

static inline void
reset_out_edges_aux (basic_block bb)
{
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, bb->succs)
    e->aux = (void *)0;
}

/* Compute the number of case labels that correspond to each outgoing edge of
   STMT. Record this information in the aux field of the edge.  */

static inline void
compute_cases_per_edge (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  reset_out_edges_aux (bb);
  int ncases = gimple_switch_num_labels (stmt);
  for (int i = ncases - 1; i >= 1; --i)
    {
      tree elt = gimple_switch_label (stmt, i);
      tree lab = CASE_LABEL (elt);
      basic_block case_bb = label_to_block_fn (cfun, lab);
      edge case_edge = find_edge (bb, case_bb);
      case_edge->aux = (void *)((intptr_t)(case_edge->aux) + 1);
    }
}

/* Terminate a case (Pascal/Ada) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.  */

void
expand_case (gimple stmt)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = NULL_RTX;
  unsigned int count, uniq;
  int i;
  int ncases = gimple_switch_num_labels (stmt);
  tree index_expr = gimple_switch_index (stmt);
  tree index_type = TREE_TYPE (index_expr);
  tree elt;
  basic_block bb = gimple_bb (stmt);

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* A pool for case nodes.  */
  alloc_pool case_node_pool;

  /* An ERROR_MARK occurs for various reasons including invalid data type.
     ??? Can this still happen, with GIMPLE and all?  */
  if (index_type == error_mark_node)
    return;

  /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
     expressions being INTEGER_CST.  */
  gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
  
  case_node_pool = create_alloc_pool ("struct case_node pool",
				      sizeof (struct case_node),
				      100);

  do_pending_stack_adjust ();

  /* Find the default case target label.  */
  default_label = label_rtx (CASE_LABEL (gimple_switch_default_label (stmt)));
  edge default_edge = EDGE_SUCC (bb, 0);
  int default_prob = default_edge->probability;

  /* Get upper and lower bounds of case values.  */
  elt = gimple_switch_label (stmt, 1);
  minval = fold_convert (index_type, CASE_LOW (elt));
  elt = gimple_switch_label (stmt, ncases - 1);
  if (CASE_HIGH (elt))
    maxval = fold_convert (index_type, CASE_HIGH (elt));
  else
    maxval = fold_convert (index_type, CASE_LOW (elt));

  /* Compute span of values.  */
  range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

  /* Listify the labels queue and gather some numbers to decide
     how to expand this switch().  */
  uniq = 0;
  count = 0;
  hash_set<tree> seen_labels;
  compute_cases_per_edge (stmt);

  for (i = ncases - 1; i >= 1; --i)
    {
      elt = gimple_switch_label (stmt, i);
      tree low = CASE_LOW (elt);
      gcc_assert (low);
      tree high = CASE_HIGH (elt);
      gcc_assert (! high || tree_int_cst_lt (low, high));
      tree lab = CASE_LABEL (elt);

      /* Count the elements.
	 A range counts double, since it requires two compares.  */
      count++;
      if (high)
	count++;

      /* If we have not seen this label yet, then increase the
	 number of unique case node targets seen.  */
      if (!seen_labels.add (lab))
	uniq++;

      /* The bounds on the case range, LOW and HIGH, have to be converted
	 to case's index type TYPE.  Note that the original type of the
	 case index in the source code is usually "lost" during
	 gimplification due to type promotion, but the case labels retain the
	 original type.  Make sure to drop overflow flags.  */
      low = fold_convert (index_type, low);
      if (TREE_OVERFLOW (low))
	low = wide_int_to_tree (index_type, low);

      /* The canonical form of a case label in GIMPLE is that a simple case
	 has an empty CASE_HIGH.  For the casesi and tablejump expanders,
	 the back ends want simple cases to have high == low.  */
      if (! high)
	high = low;
      high = fold_convert (index_type, high);
      if (TREE_OVERFLOW (high))
	high = wide_int_to_tree (index_type, high);

      basic_block case_bb = label_to_block_fn (cfun, lab);
      edge case_edge = find_edge (bb, case_bb);
      case_list = add_case_node (
          case_list, low, high, lab,
          case_edge->probability / (intptr_t)(case_edge->aux),
          case_node_pool);
    }
  reset_out_edges_aux (bb);

  /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
     destination, such as one with a default case only.
     It also removes cases that are out of range for the switch
     type, so we should never get a zero here.  */
  gcc_assert (count > 0);

  rtx before_case = get_last_insn ();

  /* Decide how to expand this switch.
     The two options at this point are a dispatch table (casesi or
     tablejump) or a decision tree.  */

  if (expand_switch_as_decision_tree_p (range, uniq, count))
    emit_case_decision_tree (index_expr, index_type,
                             case_list, default_label,
                             default_prob);
  else
    emit_case_dispatch_table (index_expr, index_type,
			      case_list, default_label,
			      minval, maxval, range, bb);

  reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);

  free_temp_slots ();
  free_alloc_pool (case_node_pool);
}

/* Expand the dispatch to a short decrement chain if there are few cases
   to dispatch to.  Likewise if neither casesi nor tablejump is available,
   or if flag_jump_tables is not set.  Otherwise, expand as a casesi or a
   tablejump.  The index mode is always the mode of integer_type_node.
   Trap if no case matches the index.

   DISPATCH_INDEX is the index expression to switch on.  It should be a
   memory or register operand.
   
   DISPATCH_TABLE is a set of case labels.  The set should be sorted in
   ascending order, be contiguous, starting with value 0, and contain only
   single-valued case labels.  */

void
expand_sjlj_dispatch_table (rtx dispatch_index,
			    vec<tree> dispatch_table)
{
  tree index_type = integer_type_node;
  enum machine_mode index_mode = TYPE_MODE (index_type);

  int ncases = dispatch_table.length ();

  do_pending_stack_adjust ();
  rtx before_case = get_last_insn ();

  /* Expand as a decrement-chain if there are 5 or fewer dispatch
     labels.  This covers more than 98% of the cases in libjava,
     and seems to be a reasonable compromise between the "old way"
     of expanding as a decision tree or dispatch table vs. the "new
     way" with decrement chain or dispatch table.  */
  if (dispatch_table.length () <= 5
      || (!HAVE_casesi && !HAVE_tablejump)
      || !flag_jump_tables)
    {
      /* Expand the dispatch as a decrement chain:

	 "switch(index) {case 0: do_0; case 1: do_1; ...; case N: do_N;}"

	 ==>

	 if (index == 0) do_0; else index--;
	 if (index == 0) do_1; else index--;
	 ...
	 if (index == 0) do_N; else index--;

	 This is more efficient than a dispatch table on most machines.
	 The last "index--" is redundant but the code is trivially dead
	 and will be cleaned up by later passes.  */
      rtx index = copy_to_mode_reg (index_mode, dispatch_index);
      rtx zero = CONST0_RTX (index_mode);
      for (int i = 0; i < ncases; i++)
        {
	  tree elt = dispatch_table[i];
	  rtx lab = label_rtx (CASE_LABEL (elt));
	  do_jump_if_equal (index_mode, index, zero, lab, 0, -1);
	  force_expand_binop (index_mode, sub_optab,
			      index, CONST1_RTX (index_mode),
			      index, 0, OPTAB_DIRECT);
	}
    }
  else
    {
      /* Similar to expand_case, but much simpler.  */
      struct case_node *case_list = 0;
      alloc_pool case_node_pool = create_alloc_pool ("struct sjlj_case pool",
						     sizeof (struct case_node),
						     ncases);
      tree index_expr = make_tree (index_type, dispatch_index);
      tree minval = build_int_cst (index_type, 0);
      tree maxval = CASE_LOW (dispatch_table.last ());
      tree range = maxval;
      rtx default_label = gen_label_rtx ();

      for (int i = ncases - 1; i >= 0; --i)
	{
	  tree elt = dispatch_table[i];
	  tree low = CASE_LOW (elt);
	  tree lab = CASE_LABEL (elt);
	  case_list = add_case_node (case_list, low, low, lab, 0, case_node_pool);
	}

      emit_case_dispatch_table (index_expr, index_type,
				case_list, default_label,
				minval, maxval, range,
                                BLOCK_FOR_INSN (before_case));
      emit_label (default_label);
      free_alloc_pool (case_node_pool);
    }

  /* Dispatching something not handled?  Trap!  */
  expand_builtin_trap ();

  reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);

  free_temp_slots ();
}


/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    ranges++;

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;

	  /* If there are just three nodes, split at the middle one.  */
	  if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
          np->subtree_prob = np->prob;
          np->subtree_prob += np->left->subtree_prob;
          np->subtree_prob += np->right->subtree_prob;
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
          np->subtree_prob = np->prob;
	  for (; np->right; np = np->right)
            {
	      np->right->parent = np;
              (*head)->subtree_prob += np->right->subtree_prob;
            }
	}
    }
}

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low,
			       build_int_cst (TREE_TYPE (node->low), 1));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (case_node_ptr node, tree index_type)
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high,
			       build_int_cst (TREE_TYPE (node->high), 1));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (case_node_ptr node, tree index_type)
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}


/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinate nodes' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
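
/* As a rough illustration only (not the exact RTL), a single-valued node
   for the case value 5 whose left and right subtrees both contain further,
   unbounded nodes is expanded along the lines of:

       if (index == 5) goto L5;
       if (index > 5)  goto test_right;
       ...code for the left subtree...
       goto default;
     test_right:
       ...code for the right subtree...

   with each conditional branch annotated with a probability derived from
   the node's prob and subtree_prob fields.  */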

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 int default_prob, tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  int probability;
  int prob = node->prob, subtree_prob = node->subtree_prob;
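  /* NODE->prob holds the estimated weight of this node's own case labels,
     while NODE->subtree_prob also counts its children; together they are
     used to give each comparison emitted here a conditional branch
     probability (see conditional_probability earlier in this file).  */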
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      probability = conditional_probability (prob, subtree_prob + default_prob);
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */
      do_jump_if_equal (mode, index,
			convert_modes (mode, imode,
				       expand_normal (node->low),
				       unsignedp),
			label_rtx (node->code_label), unsignedp, probability);
      /* Since this case is taken at this point, reduce its weight from
         subtree_prob.  */
      subtree_prob -= prob;
      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
              probability = conditional_probability (
                  node->right->prob,
                  subtree_prob + default_prob);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label),
                                       probability);
	      emit_case_nodes (index, node->left, default_label, default_prob,
                               index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
              probability = conditional_probability (
                  node->left->prob,
                  subtree_prob + default_prob);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label),
                                       probability);
	      emit_case_nodes (index, node->right, default_label, default_prob, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Neither child is bounded, but both are single-valued leaves,
		 so two equality tests suffice and no ordered comparison is
		 needed to distinguish the two sides first.  */

	      /* See if the value matches what the right hand side
		 wants.  */
              probability = conditional_probability (
                  node->right->prob,
                  subtree_prob + default_prob);
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->right->low),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp, probability);

	      /* See if the value matches what the left hand side
		 wants.  */
              probability = conditional_probability (
                  node->left->prob,
                  subtree_prob + default_prob);
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->left->low),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp, probability);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (curr_insn_location (),
			      LABEL_DECL, NULL_TREE, NULL_TREE);

              /* The default label could be reached either through the right
                 subtree or the left subtree. Divide the probability
                 equally.  */
              probability = conditional_probability (
                  node->right->subtree_prob + default_prob/2,
                  subtree_prob + default_prob);
	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label),
                                       probability);
              default_prob /= 2;

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, default_prob, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      if (default_label)
	        emit_jump (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, default_prob, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if the right child
	     does not have any children and is single valued; it would
	     cost too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
                  probability = conditional_probability (
                      default_prob/2,
                      subtree_prob + default_prob);
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label,
                                           probability);
                  default_prob /= 2;
		}

	      emit_case_nodes (index, node->right, default_label, default_prob, index_type);
	    }
	  else
            {
              probability = conditional_probability (
                  node->right->subtree_prob,
                  subtree_prob + default_prob);
	      /* We cannot process node->right normally
	         since we haven't ruled out the numbers less than
	         this node's value.  So handle node->right explicitly.  */
	      do_jump_if_equal (mode, index,
			        convert_modes
			        (mode, imode,
			         expand_normal (node->right->low),
			         unsignedp),
			        label_rtx (node->right->code_label), unsignedp, probability);
            }
	  }

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
                  probability = conditional_probability (
                      default_prob/2,
                      subtree_prob + default_prob);
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label,
                                           probability);
                  default_prob /= 2;
		}

	      emit_case_nodes (index, node->left, default_label,
                               default_prob, index_type);
	    }
	  else
            {
              probability = conditional_probability (
                  node->left->subtree_prob,
                  subtree_prob + default_prob);
	      /* We cannot process node->left normally
	         since we haven't ruled out the numbers less than
	         this node's value.  So handle node->left explicitly.  */
	      do_jump_if_equal (mode, index,
			        convert_modes
			        (mode, imode,
			         expand_normal (node->left->low),
			         unsignedp),
			        label_rtx (node->left->code_label), unsignedp, probability);
            }
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
            {
	      /* Right hand node is fully bounded so we can eliminate any
	         testing and branch directly to the target code.  */
              probability = conditional_probability (
                  node->right->subtree_prob,
                  subtree_prob + default_prob);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
				        expand_normal (node->high),
				        unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label),
                                       probability);
            }
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (curr_insn_location (),
				       LABEL_DECL, NULL_TREE, NULL_TREE);
              probability = conditional_probability (
                  node->right->subtree_prob + default_prob/2,
                  subtree_prob + default_prob);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label),
                                       probability);
              default_prob /= 2;
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

          probability = conditional_probability (
              prob,
              subtree_prob + default_prob);
	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label),
                                   probability);

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, default_prob, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      if (default_label)
		emit_jump (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, default_prob, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
              probability = conditional_probability (
                  default_prob/2,
                  subtree_prob + default_prob);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label,
                                       probability);
              default_prob /= 2;
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

          probability = conditional_probability (
              prob,
              subtree_prob + default_prob);
	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->high),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label),
                                   probability);

	  emit_case_nodes (index, node->right, default_label, default_prob, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
              probability = conditional_probability (
                  default_prob/2,
                  subtree_prob + default_prob);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label,
                                       probability);
              default_prob /= 2;
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

          probability = conditional_probability (
              prob,
              subtree_prob + default_prob);
	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label),
                                   probability);

	  emit_case_nodes (index, node->left, default_label, default_prob, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
              probability = conditional_probability (
                  default_prob,
                  subtree_prob + default_prob);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label,
                                       probability);
	    }

	  else if (!low_bound && high_bound)
	    {
              probability = conditional_probability (
                  default_prob,
                  subtree_prob + default_prob);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label,
                                       probability);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
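	      /* E.g. for a range 5 ... 12 this emits the single unsigned
		 test (index - 5) > 7 in place of separate lower- and
		 upper-bound comparisons (illustrative values only).  */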
	      low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
						    high, low),
				       NULL_RTX, mode, EXPAND_NORMAL);

              probability = conditional_probability (
                  default_prob,
                  subtree_prob + default_prob);
	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label, probability);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}