/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "tm_p.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "fold-const.h"
#include "stor-layout.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "langhooks.h"

static bool prefer_and_bit_test (machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int,
				      rtx_code_label *, rtx_code_label *, int);
static void do_jump_by_parts_equality (tree, tree, rtx_code_label *,
				       rtx_code_label *, int);
static void do_compare_and_jump	(tree, tree, enum rtx_code, enum rtx_code,
				 rtx_code_label *, rtx_code_label *, int);

/* Invert probability if there is any.  -1 stands for unknown.  */

static inline int
inv (int prob)
{
  return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
}
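/* For example, with the usual REG_BR_PROB_BASE scale of 10000,
   inv (9900) is 100 (a 99% chance becomes a 1% chance of the inverse
   branch), and inv (-1) stays -1 (unknown).  */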

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Remember pending_stack_adjust/stack_pointer_delta.
   To be used around code that may call do_pending_stack_adjust (),
   but the generated code could be discarded e.g. using delete_insns_since.  */

void
save_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  save->x_pending_stack_adjust = pending_stack_adjust;
  save->x_stack_pointer_delta = stack_pointer_delta;
}

/* Restore the saved pending_stack_adjust/stack_pointer_delta.  */

void
restore_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  if (inhibit_defer_pop == 0)
    {
      pending_stack_adjust = save->x_pending_stack_adjust;
      stack_pointer_delta = save->x_stack_pointer_delta;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.  */

void
jumpifnot (tree exp, rtx_code_label *label, int prob)
{
  do_jump (exp, label, NULL, inv (prob));
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
	     int prob)
{
  do_jump_1 (code, op0, op1, label, NULL, inv (prob));
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx_code_label *label, int prob)
{
  do_jump (exp, NULL, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1,
	  rtx_code_label *label, int prob)
{
  do_jump_1 (code, op0, op1, NULL, label, prob);
}

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (machine_mode mode, int bitnum)
{
  bool speed_p;
  wide_int mask = wi::set_bit_in_zero (bitnum, GET_MODE_PRECISION (mode));

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
	 for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, LAST_VIRTUAL_REGISTER + 1);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
				const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = immed_wide_int_const (mask, mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, mode, IF_THEN_ELSE, 0, speed_p)
	  <= rtx_cost (shift_test, mode, IF_THEN_ELSE, 0, speed_p));
}
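/* As an illustration of prefer_and_bit_test: for SImode and BITNUM == 3,
   the two rtxes whose costs are compared are

       (and:SI (reg:SI) (const_int 8))                   ;; X & (1 << 3)
       (and:SI (ashiftrt:SI (reg:SI) (const_int 3))
	       (const_int 1))                            ;; (X >> 3) & 1

   and ties go to the AND form, since the cost comparison uses <=.  */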

/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1 .  IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
	   rtx_code_label *if_false_label, rtx_code_label *if_true_label,
	   int prob)
{
  machine_mode mode;
  rtx_code_label *drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
	  do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
				     prob);
        else
	  do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
			       prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
	  do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
				     inv (prob));
        else
	  do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
			       prob);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
				  prob);
      else
	do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
			     prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
				  inv (prob));
      else
	do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
			     prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
				  prob);
      else
	do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
			     prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
				  inv (prob));
      else
	do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
			     prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
			   if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
			   if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
			   prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
			   prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
			   prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
			   prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
			   prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
			   prob);
      break;

    case TRUTH_ANDIF_EXPR:
      {
        /* Spread the probability that the expression is false evenly between
           the two conditions.  So the first condition is false with half the
           total probability of being false, and the second condition is false
           with the other half; its jump therefore has a false probability of
           half the total, relative to the probability we reached it
           (i.e. the first condition was true).  */
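        /* A worked example (assuming the usual REG_BR_PROB_BASE of 10000):
           if the whole expression is true with prob == 8000, it is false
           2000 of the time, and each condition accounts for 1000 of that.
           op0_prob then becomes inv (1000) == 9000, and op1_prob becomes
           roughly 8889, the chance that op1 is true given that op0
           already was.  */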
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            int false_prob = inv (prob);
            int op0_false_prob = false_prob / 2;
            int op1_false_prob = GCOV_COMPUTE_SCALE ((false_prob / 2),
                                                     inv (op0_false_prob));
            /* Get the probability that each jump below is true.  */
            op0_prob = inv (op0_false_prob);
            op1_prob = inv (op1_false_prob);
          }
	if (if_false_label == NULL)
          {
            drop_through_label = gen_label_rtx ();
	    do_jump (op0, drop_through_label, NULL, op0_prob);
	    do_jump (op1, NULL, if_true_label, op1_prob);
          }
        else
          {
	    do_jump (op0, if_false_label, NULL, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    case TRUTH_ORIF_EXPR:
      {
        /* Spread the probability evenly between the two conditions. So
           the first condition has half the total probability of being true.
           The second condition has the other half of the total probability,
           so its jump has a probability of half the total, relative to
           the probability we reached it (i.e. the first condition was false).  */
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            op0_prob = prob / 2;
            op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
	  }
	if (if_true_label == NULL)
	  {
	    drop_through_label = gen_label_rtx ();
	    do_jump (op0, NULL, drop_through_label, op0_prob);
	    do_jump (op1, if_false_label, NULL, op1_prob);
	  }
	else
	  {
	    do_jump (op0, NULL, if_true_label, op0_prob);
	    do_jump (op1, if_false_label, if_true_label, op1_prob);
	  }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump (tree exp, rtx_code_label *if_false_label,
	 rtx_code_label *if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  machine_mode mode;
  rtx_code_label *drop_through_label = NULL;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      {
	rtx_code_label *lab = integer_zerop (exp) ? if_false_label
						  : if_true_label;
	if (lab)
	  emit_jump (lab);
	break;
      }

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
	       inv (prob));
      break;

    case COND_EXPR:
      {
	rtx_code_label *label1 = gen_label_rtx ();
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

        do_pending_stack_adjust ();
	do_jump (TREE_OPERAND (exp, 0), label1, NULL, -1);
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
	break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx_code_label *set_label, *clr_label;
	  int setclr_prob = prob;

	  /* Strip narrowing integral type conversions.  */
	  while (CONVERT_EXPR_P (exp0)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	      setclr_prob = inv (prob);
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cstu (argtype, mask)),
			   clr_label, set_label, setclr_prob);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
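      /* For instance, a test such as "(int) c & 0x80", where the mask fits
         in 8 bits, can be carried out as a QImode comparison when the
         target has a QImode compare, which is typically cheaper than the
         full-width AND and compare.  */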

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
		       false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
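/* For example, a DImode comparison on a 32-bit target is done as two
   word_mode comparisons: the topmost words are compared with the
   requested signedness, and lower words are considered, as unsigned,
   only when all higher words compare equal.  */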

static void
do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
			      rtx op1, rtx_code_label *if_false_label,
			      rtx_code_label *if_true_label,
			      int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }

  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
    }

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
			       word_mode, NULL_RTX, NULL, if_true_label,
			       prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
	break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL, if_false_label, inv (prob));
    }

  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
			  rtx_code_label *if_false_label,
			  rtx_code_label *if_true_label, int prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label, prob);
}

/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (machine_mode mode, rtx op0,
			   rtx_code_label *if_false_label,
			   rtx_code_label *if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx_code_label *drop_through_label = NULL;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
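  /* E.g. a DImode OP0 on a 32-bit target has its two word_mode subwords
     IORed into a single register, and that one result is tested against
     zero, rather than branching on each subword separately.  */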

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    if_false_label = drop_through_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (machine_mode mode, rtx op0, rtx op1,
			       rtx_code_label *if_false_label,
			       rtx_code_label *if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = NULL;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
				 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
				 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
			     if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1,
			   rtx_code_label *if_false_label,
			   rtx_code_label *if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
				 if_true_label, prob);
}

/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */
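/* For instance, LT splits into ORDERED && UNLT (returning true), while
   UNGE splits into UNORDERED || GE (returning false), as laid out in
   the switch below.  */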

bool
split_comparison (enum rtx_code code, machine_mode mode,
		  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
    case LT:
      *code1 = ORDERED;
      *code2 = UNLT;
      return true;
    case LE:
      *code1 = ORDERED;
      *code2 = UNLE;
      return true;
    case GT:
      *code1 = ORDERED;
      *code2 = UNGT;
      return true;
    case GE:
      *code1 = ORDERED;
      *code2 = UNGE;
      return true;
    case EQ:
      *code1 = ORDERED;
      *code2 = UNEQ;
      return true;
    case NE:
      *code1 = UNORDERED;
      *code2 = LTGT;
      return false;
    case UNLT:
      *code1 = UNORDERED;
      *code2 = LT;
      return false;
    case UNLE:
      *code1 = UNORDERED;
      *code2 = LE;
      return false;
    case UNGT:
      *code1 = UNORDERED;
      *code2 = GT;
      return false;
    case UNGE:
      *code1 = UNORDERED;
      *code2 = GE;
      return false;
    case UNEQ:
      *code1 = UNORDERED;
      *code2 = EQ;
      return false;
    case LTGT:
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
	{
          *code1 = LT;
          *code2 = GT;
          return false;
	}
      else
	{
	  *code1 = ORDERED;
	  *code2 = NE;
	  return true;
	}
    default:
      gcc_unreachable ();
    }
}


/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
			 machine_mode mode, rtx size,
			 rtx_code_label *if_false_label,
			 rtx_code_label *if_true_label, int prob)
{
  rtx tem;
  rtx_code_label *dummy_label = NULL;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
	  || code == ORDERED || code == UNORDERED
	  || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
	  || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
	  || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
	{
	  std::swap (if_true_label, if_false_label);
	  code = rcode;
	  prob = inv (prob);
	}
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
						 op0, op1)))
    {
      if (CONSTANT_P (tem))
	{
	  rtx_code_label *label = (tem == const0_rtx
				   || tem == CONST0_RTX (mode))
					? if_false_label : if_true_label;
	  if (label)
	    emit_jump (label);
	  return;
	}

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
	{
	case LTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_false_label, if_true_label, prob);
	  break;

	case LEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case GTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_false_label, if_true_label, prob);
	  break;

	case GEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case LT:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_false_label, if_true_label, prob);
	  break;

	case LE:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case GT:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_false_label, if_true_label, prob);
	  break;

	case GE:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case EQ:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
					 if_true_label, prob);
	  break;

	case NE:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
					 if_false_label, inv (prob));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
	  && ! can_compare_p (code, mode, ccp_jump)
	  && can_compare_p (swap_condition (code), mode, ccp_jump))
	{
	  code = swap_condition (code);
	  std::swap (op0, op1);
	}
      else if (SCALAR_FLOAT_MODE_P (mode)
	       && ! can_compare_p (code, mode, ccp_jump)
	       /* Never split ORDERED and UNORDERED.
		  These must be implemented.  */
	       && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
		  we can jump on other conditions...  */
	       && (have_insn_for (COMPARE, mode)
	           /* ... or if there is no libcall for it.  */
	           || code_to_optab (code) == unknown_optab))
        {
	  enum rtx_code first_code;
	  bool and_them = split_comparison (code, mode, &first_code, &code);

	  /* If there are no NaNs, the first comparison should always fall
	     through.  */
	  if (!HONOR_NANS (mode))
	    gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));

	  else
	    {
	      int first_prob = prob;
	      if (first_code == UNORDERED)
		first_prob = REG_BR_PROB_BASE / 100;
	      else if (first_code == ORDERED)
		first_prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100;
	      if (and_them)
		{
		  rtx_code_label *dest_label;
		  /* If we only jump if true, just bypass the second jump.  */
		  if (! if_false_label)
		    {
		      if (! dummy_label)
		        dummy_label = gen_label_rtx ();
		      dest_label = dummy_label;
		    }
		  else
		    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
					   size, dest_label, NULL, first_prob);
		}
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
					 size, NULL, if_true_label, first_prob);
	    }
	}

      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			       if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
		     enum rtx_code unsigned_code,
		     rtx_code_label *if_false_label,
		     rtx_code_label *if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && POINTER_TYPE_P (TREE_TYPE (treeop0))
      && POINTER_TYPE_P (TREE_TYPE (treeop1))
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop1))))
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
			   if_false_label, if_true_label, prob);
}

#include "gt-dojump.h"