ifcvt.c 92.5 KB
Newer Older
Richard Henderson committed
1
/* If-conversion support.
2 3
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.
Richard Henderson committed
4

5
   This file is part of GCC.
Richard Henderson committed
6

7 8
   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
Richard Henderson committed
9 10 11
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

12 13 14 15
   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.
Richard Henderson committed
16 17

   You should have received a copy of the GNU General Public License
18 19 20
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   02111-1307, USA.  */
Richard Henderson committed
21 22 23

#include "config.h"
#include "system.h"
24 25
#include "coretypes.h"
#include "tm.h"
Richard Henderson committed
26 27 28 29 30 31 32

#include "rtl.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
33
#include "except.h"
34
#include "hard-reg-set.h"
Richard Henderson committed
35 36
#include "basic-block.h"
#include "expr.h"
37
#include "real.h"
Richard Henderson committed
38
#include "output.h"
39
#include "optabs.h"
40
#include "toplev.h"
Richard Henderson committed
41
#include "tm_p.h"
42
#include "cfgloop.h"
43
#include "target.h"
Richard Henderson committed
44 45 46 47 48 49 50 51 52 53 54 55 56 57


/* Provide zero-valued fallbacks for optional target capabilities so the
   rest of this file can test them unconditionally with plain `if'.  */
#ifndef HAVE_conditional_execution
#define HAVE_conditional_execution 0
#endif
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif
#ifndef HAVE_conditional_trap
#define HAVE_conditional_trap 0
#endif

/* Default limit on the number of insns to convert: one more than the
   cost of a mispredicted branch.  Targets may override this.  */
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE   (BRANCH_COST + 1)
#endif

#define NULL_BLOCK	((basic_block) NULL)
Richard Henderson committed
70 71 72 73 74 75 76 77

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made which require life information to be updated.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* True if life data is ok at present.  */
static bool life_data_ok;

Richard Henderson committed
87
/* Forward references.  */
static int count_bb_insns (basic_block);
static bool cheap_bb_rtx_cost_p (basic_block, int);
static rtx first_active_insn (basic_block);
static rtx last_active_insn (basic_block, int);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
static rtx cond_exec_get_condition (rtx);
static int cond_exec_process_if_block (ce_if_block_t *, int);
static rtx noce_get_condition (rtx, rtx *);
static int noce_operand_ok (rtx);
static int noce_process_if_block (ce_if_block_t *);
static int process_if_block (ce_if_block_t *);
static void merge_if_block (ce_if_block_t *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int find_if_block (ce_if_block_t *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int find_memory (rtx *, void *);
static int dead_or_predicable (basic_block, basic_block, basic_block,
			       basic_block, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx block_has_only_trap (basic_block);
Richard Henderson committed
112 113 114 115

/* Count the number of non-jump active insns in BB.  */

static int
116
count_bb_insns (basic_block bb)
Richard Henderson committed
117 118
{
  int count = 0;
119
  rtx insn = BB_HEAD (bb);
Richard Henderson committed
120 121 122

  while (1)
    {
123
      if (CALL_P (insn) || NONJUMP_INSN_P (insn))
Richard Henderson committed
124 125
	count++;

126
      if (insn == BB_END (bb))
Richard Henderson committed
127 128 129 130 131 132 133
	break;
      insn = NEXT_INSN (insn);
    }

  return count;
}

134 135 136
/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.  */

static bool
cheap_bb_rtx_cost_p (basic_block bb, int max_cost)
{
  int count = 0;			/* running cost total */
  rtx insn = BB_HEAD (bb);

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
	{
	  int cost = insn_rtx_cost (PATTERN (insn));
	  /* A zero cost means insn_rtx_cost could not estimate this
	     insn; treat the whole block as too expensive.  */
	  if (cost == 0)
	    return false;

	  /* If this instruction is the load or set of a "stack" register,
	     such as a floating point register on x87, then the cost of
	     speculatively executing this instruction needs to include
	     the additional cost of popping this register off of the
	     register stack.  */
#ifdef STACK_REGS
	  {
	    rtx set = single_set (insn);
	    if (set && STACK_REG_P (SET_DEST (set)))
	      cost += COSTS_N_INSNS (1);
	  }
#endif

	  count += cost;
	  if (count >= max_cost)
	    return false;
	}
      else if (CALL_P (insn))
	/* Calls are never cheap to speculate.  */
	return false;
 
      if (insn == BB_END (bb))
	break;
      insn = NEXT_INSN (insn);
    }

  return true;
}

Richard Henderson committed
180 181 182
/* Return the first non-jump active insn in the basic block.  */

static rtx
183
first_active_insn (basic_block bb)
Richard Henderson committed
184
{
185
  rtx insn = BB_HEAD (bb);
Richard Henderson committed
186

187
  if (LABEL_P (insn))
Richard Henderson committed
188
    {
189
      if (insn == BB_END (bb))
Richard Henderson committed
190 191 192 193
	return NULL_RTX;
      insn = NEXT_INSN (insn);
    }

194
  while (NOTE_P (insn))
Richard Henderson committed
195
    {
196
      if (insn == BB_END (bb))
Richard Henderson committed
197 198 199 200
	return NULL_RTX;
      insn = NEXT_INSN (insn);
    }

201
  if (JUMP_P (insn))
Richard Henderson committed
202 203 204 205 206
    return NULL_RTX;

  return insn;
}

207
/* Return the last non-jump active (non-jump) insn in the basic block.  */
Richard Henderson committed
208

209
static rtx
210
last_active_insn (basic_block bb, int skip_use_p)
Richard Henderson committed
211
{
212 213
  rtx insn = BB_END (bb);
  rtx head = BB_HEAD (bb);
214

215 216
  while (NOTE_P (insn)
	 || JUMP_P (insn)
217
	 || (skip_use_p
218
	     && NONJUMP_INSN_P (insn)
219
	     && GET_CODE (PATTERN (insn)) == USE))
Richard Henderson committed
220
    {
221 222 223
      if (insn == head)
	return NULL_RTX;
      insn = PREV_INSN (insn);
Richard Henderson committed
224 225
    }

226
  if (LABEL_P (insn))
227 228 229
    return NULL_RTX;

  return insn;
Richard Henderson committed
230
}
231

232
/* Return the basic block reached by falling though the basic block BB.  */
233 234

static basic_block
235
block_fallthru (basic_block bb)
236 237
{
  edge e;
238
  edge_iterator ei;
239

240 241 242
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_FALLTHRU)
      break;
243 244 245

  return (e) ? e->dest : NULL_BLOCK;
}
Richard Henderson committed
246 247 248 249 250 251

/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.

   All pattern changes are queued with validate_change (in_group = 1);
   the caller commits or discards them via apply_change_group /
   cancel_changes.  */

static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
			 /* if block information */rtx start,
			 /* first insn to look at */rtx end,
			 /* last insn to look at */rtx test,
			 /* conditional execution test */rtx prob_val,
			 /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;	/* set once an insn modifies TEST */
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	goto insn_done;

      gcc_assert(NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.  */
	  SET_INSN_DELETED (insn);
	  goto insn_done;
	}

      /* Last insn wasn't last?  An insn after one that modified TEST
	 would be predicated on a stale condition.  */
      if (must_be_last)
	return FALSE;

      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      /* Queue the change; the caller applies the whole group.  */
      validate_change (insn, &PATTERN (insn), pattern, 1);

      if (CALL_P (insn) && prob_val)
	validate_change (insn, &REG_NOTES (insn),
			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
					  REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}

/* Return the condition for a jump.  Do not do any special processing.  */

static rtx
339
cond_exec_get_condition (rtx jump)
Richard Henderson committed
340 341 342
{
  rtx test_if, cond;

343
  if (any_condjump_p (jump))
344
    test_if = SET_SRC (pc_set (jump));
Richard Henderson committed
345 346 347 348 349 350 351 352
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
353 354 355 356 357 358 359 360
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
	return NULL_RTX;

      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
			     XEXP (cond, 1));
    }
Richard Henderson committed
361 362 363 364 365 366

  return cond;
}

/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
			    /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert.  */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  rtx true_prob_val;		/* probability of else block */
  rtx false_prob_val;		/* probability of then block */
  int n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      /* With two arms we allow twice the budget.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      /* The intermediate test blocks are predicated on FALSE_EXPR,
	 so we cannot proceed without it.  */
      if (! false_expr)
	goto fail;

      do
	{
	  rtx start, end;
	  rtx t, f;
	  enum rtx_code f_code;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump, then
	     we can not do conditional execution conversion on this block.  */
	  if (! onlyjump_p (BB_END (bb)))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (BB_END (bb));
	  if (! t)
	    goto fail;

	  f_code = reversed_comparison_code (t, BB_END (bb));
	  if (f_code == UNKNOWN)
	    goto fail;

	  f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
	  /* Accumulate the combined test: for &&-chains the true test
	     is the AND of the parts (and the false test the IOR); for
	     ||-chains the roles are swapped.  */
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it can
	     do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed.  */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}

587
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The block containing the test ending in JUMP.  */
  basic_block test_bb;
  /* The insns computing A and B — presumably the single set insns of
     the THEN/ELSE arms; confirm against noce_process_if_block.  */
  rtx insn_a, insn_b;
  /* X is the destination; A and B the values it receives on the two
     arms of the conditional.  */
  rtx x, a, b;
  /* The conditional jump, its comparison, and the earliest insn of
     the comparison sequence (new code is inserted before it).  */
  rtx jump, cond, cond_earliest;
  /* True if "b" was originally evaluated unconditionally.  */
  bool b_unconditional;
};

603
/* Forward declarations for the "noce" (no conditional execution)
   transformations.  */
static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
			    rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);
Richard Henderson committed
617 618 619 620

/* Helper function for noce_try_store_flag*.  Emit code to store the
   result of comparing IF_INFO->cond (reversed if REVERSEP) into X,
   normalized per NORMALIZE.  Return the register holding the result,
   or NULL_RTX on failure.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* "Complex" means an operand is not directly usable as an operand.  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      /* Build (set x (code op0 op1)) and see if the target
	 recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
			    XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  /* Re-emit the recognized sequence into the enclosing
	     (caller's) sequence.  */
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}

678 679 680
/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  enum machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  /* The common case: a plain move.  */
  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      emit_move_insn (x, y);
      return;
    }

  /* STRICT_LOW_PART: store Y into only the low part of INNER via a
     bit-field store.  NOTE(review): OUTER is assumed to be a SUBREG
     here (SUBREG_BYTE is applied to it) — confirm callers guarantee
     this.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
}

701 702 703 704 705 706 707
/* Return sequence of instructions generated by if conversion.  This
   function calls end_sequence() to end the current stream, ensures
   that are instructions are unshared, recognizable non-jump insns.
   On failure, this function returns a NULL_RTX.  */

static rtx
end_ifcvt_sequence (struct noce_if_info *if_info)
708
{
709
  rtx insn;
710 711
  rtx seq = get_insns ();

712 713 714
  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  unshare_all_rtl_in_chain (seq);
715 716
  end_sequence ();

717 718
  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
719
     As an exercise for the reader, build a general mechanism that
720
     allows proper placement of required clobbers.  */
721
  for (insn = seq; insn; insn = NEXT_INSN (insn))
722
    if (JUMP_P (insn)
723
	|| recog_memoized (insn) == -1)
724
      return NULL_RTX;
725

726
  return seq;
727 728
}

729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761
/* Convert "if (a != b) x = a; else x = b" into "x = a" and
   "if (a == b) x = a; else x = b" into "x = b".  Return TRUE on
   success.  */

static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx source, seq;
  int operands_match;

  if (code != NE && code != EQ)
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (GET_MODE (if_info->x))
      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

  /* The comparison must be between A and B, in either order.  */
  operands_match
    = ((rtx_equal_p (if_info->a, XEXP (cond, 0))
	&& rtx_equal_p (if_info->b, XEXP (cond, 1)))
       || (rtx_equal_p (if_info->a, XEXP (cond, 1))
	   && rtx_equal_p (if_info->b, XEXP (cond, 0))));
  if (! operands_match)
    return FALSE;

  source = (code == EQ) ? if_info->a : if_info->b;

  /* Avoid generating the move if the source is the destination.  */
  if (! rtx_equal_p (if_info->x, source))
    {
      start_sequence ();
      noce_emit_move_insn (if_info->x, source);
      seq = end_ifcvt_sequence (if_info);
      if (! seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
    }
  return TRUE;
}

Richard Henderson committed
774 775 776 777 778 779 780
/* Convert "if (test) x = 1; else x = 0".

   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
   tried in noce_try_store_flag_constants after noce_try_cmove has had
   a go at the conversion.  */

static int
781
noce_try_store_flag (struct noce_if_info *if_info)
Richard Henderson committed
782 783 784 785 786 787 788 789 790 791 792
{
  int reversep;
  rtx target, seq;

  if (GET_CODE (if_info->b) == CONST_INT
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
	   && GET_CODE (if_info->a) == CONST_INT
	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
793 794
	   && (reversed_comparison_code (if_info->cond, if_info->jump)
	       != UNKNOWN))
Richard Henderson committed
795 796 797 798 799 800 801 802 803 804
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
805
	noce_emit_move_insn (if_info->x, target);
Richard Henderson committed
806

807 808 809
      seq = end_ifcvt_sequence (if_info);
      if (! seq)
	return FALSE;
Richard Henderson committed
810

811 812
      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
Richard Henderson committed
813 814 815 816 817 818 819 820 821 822 823 824
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}

/* Convert "if (test) x = a; else x = b", for A and B constant.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;			/* nonzero if we swapped ITRUE/IFALSE */
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  enum machine_mode mode;

  if (! no_new_pseudos
      && GET_CODE (if_info->a) == CONST_INT
      && GET_CODE (if_info->b) == CONST_INT)
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      /* Make sure we can represent the difference between the two values.
	 (Detects signed overflow of ITRUE - IFALSE in HOST_WIDE_INT.)  */
      if ((itrue - ifalse > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (itrue - ifalse, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      /* Pick the store-flag normalization (0, 1 or -1) and decide
	 whether to reverse the condition, based on which of the
	 arithmetic forms below will apply.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || BRANCH_COST >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || BRANCH_COST >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
	normalize = -1, reversep = 1;
      else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
	       || BRANCH_COST >= 3)
	normalize = -1;
      else
	return FALSE;

      if (reversep)
	{
	  /* Swap the arms and negate the difference to match.  */
	  tmp = itrue; itrue = ifalse; ifalse = tmp;
	  diff = trunc_int_for_mode (-diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					GEN_INT (ifalse), target, if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, GEN_INT (ifalse), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, GEN_INT (diff), if_info->x, 0,
					OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, GEN_INT (ifalse),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}

951
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  IF_INFO describes the candidate block
   (condition, jump, and the single assignment being converted).
   Return TRUE and emit the branchless sequence before the jump on
   success, FALSE otherwise.  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target, seq;
  int subtract, normalize;

  /* The transformation requires new pseudos, an A of the form
     (plus B constant) so that the whole thing is "B += constant",
     and a branch condition that can be reversed (we emit code for
     the fall-through case).  */
  if (! no_new_pseudos
      && GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  /* The final argument flags unsigned comparisons for the
	     target's conditional-add expander.  */
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      /* end_ifcvt_sequence closes the sequence; on failure it
		 has already discarded it, so just bail out.  */
	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  */
      if (BRANCH_COST >= 2
	  && (XEXP (if_info->a, 1) == const1_rtx
	      || XEXP (if_info->a, 1) == constm1_rtx))
        {
	  start_sequence ();
	  /* Decide how to combine the store-flag result with B:
	     add it directly, subtract it, or ask noce_emit_store_flag
	     to normalize the flag to the required constant.  */
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}

/* Convert "if (test) x = 0;" to "x &= -(test == 0);"
   Return TRUE and emit the converted sequence before the jump on
   success, FALSE otherwise.  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;

  /* Either A is zero and B is X ("if (test) x = 0"), or B is zero
     and A is X ("if (test) x = x; else x = 0", i.e. the reversed
     form).  NOTE: the assignment to REVERSEP inside the condition
     is deliberate — it only happens when the first alternative has
     already failed, and it records which flag polarity to emit.  */
  reversep = 0;
  if (! no_new_pseudos
      && (BRANCH_COST >= 2
	  || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
	  || ((reversep = (reversed_comparison_code (if_info->cond,
						     if_info->jump)
			   != UNKNOWN))
	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      /* Materialize -(test != 0) (or its reverse) as an all-ones/zero
	 mask, then AND it into X.  */
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x,
				      target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATOR (if_info->insn_a));
	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}

/* Helper function for noce_try_cmove and noce_try_cmove_arith.
   Emit a conditional move setting X to VTRUE when (CODE CMP_A CMP_B)
   holds, VFALSE otherwise.  Return the rtx holding the result (X or
   a new register), or NULL_RTX if no conditional move could be
   generated.  Insns are emitted into the current sequence.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      /* Hand-build (set x (if_then_else (code cmp_a cmp_b) vtrue vfalse))
	 and see whether the target recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  /* Re-emit the recognized insns into the enclosing sequence.  */
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				vtrue, vfalse, GET_MODE (x),
			        (code == LTU || code == GEU
				 || code == LEU || code == GTU));
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}

/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */

static int
1151
noce_try_cmove (struct noce_if_info *if_info)
Richard Henderson committed
1152 1153 1154 1155 1156 1157 1158 1159 1160 1161 1162 1163 1164 1165 1166 1167 1168 1169
{
  enum rtx_code code;
  rtx target, seq;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
				XEXP (if_info->cond, 0),
				XEXP (if_info->cond, 1),
				if_info->a, if_info->b);

      if (target)
	{
	  if (target != if_info->x)
1170
	    noce_emit_move_insn (if_info->x, target);
Richard Henderson committed
1171

1172 1173 1174 1175
	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

1176
	  emit_insn_before_setloc (seq, if_info->jump,
1177
				   INSN_LOCATOR (if_info->insn_a));
Richard Henderson committed
1178 1179 1180 1181 1182 1183 1184 1185 1186 1187 1188 1189 1190 1191 1192
	  return TRUE;
	}
      else
	{
	  end_sequence ();
	  return FALSE;
	}
    }

  return FALSE;
}

/* Try more complex cases involving conditional_move.  Both arms are
   computed unconditionally into temporaries, then a conditional move
   selects the result.  Return TRUE on success, FALSE otherwise.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  if (! no_new_pseudos && cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && BRANCH_COST >= 5)
    {
      /* Switch to cmove-ing the addresses; the load is emitted at
	 the end once the selected address is known.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (Pmode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost = insn_rtx_cost (PATTERN (insn_a));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (BRANCH_COST))
	return FALSE;
    }
  else
    {
      insn_cost = 0;
    }

  if (insn_b)
    {
      insn_cost += insn_rtx_cost (PATTERN (insn_b));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (BRANCH_COST))
	return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  /* Swap the arms (and their defining insns) and reverse the
	     condition so the cmove reads more naturally.  */
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  tmp = a, a = b, b = tmp;
	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
	}
    }

  start_sequence ();

  /* Remember the pre-copy operands for the clobber check below.  */
  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
	  tmp = gen_reg_rtx (GET_MODE (a));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
	}
      else if (! insn_a)
	goto end_seq_and_fail;
      else
	{
	  /* Copy the original insn and retarget its destination at a
	     fresh pseudo, preserving clobbers etc.  */
	  a = gen_reg_rtx (GET_MODE (a));
	  tmp = copy_rtx (insn_a);
	  set = single_set (tmp);
	  SET_DEST (set) = a;
	  tmp = emit_insn (PATTERN (tmp));
	}
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set, last;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
          tmp = gen_reg_rtx (GET_MODE (b));
	  tmp = gen_rtx_SET (VOIDmode, tmp, b);
	}
      else if (! insn_b)
	goto end_seq_and_fail;
      else
	{
          b = gen_reg_rtx (GET_MODE (b));
	  tmp = copy_rtx (insn_b);
	  set = single_set (tmp);
	  SET_DEST (set) = b;
	  /* Not emitted yet — placement is decided below.  */
	  tmp = PATTERN (tmp);
	}

      /* If insn to set up A clobbers any registers B depends on, try to
	 swap insn that sets up A with the one that sets up B.  If even
	 that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
	{
	  tmp = emit_insn_before (tmp, get_insns ());
	  if (modified_in_p (orig_a, tmp))
	    goto end_seq_and_fail;
	}
      else
	tmp = emit_insn (tmp);

      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  The new MEM may only carry
	 attributes both sources agree on; alignment takes the minimum.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
	MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
	MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
      set_mem_align (tmp,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  tmp = end_ifcvt_sequence (if_info);
  if (!tmp)
    return FALSE;

  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}

1386 1387 1388 1389 1390
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.
   TARGET is the operand (or constant) we want mentioned in the
   condition; *EARLIEST is set to the insn where the returned
   condition becomes valid.  Return the condition, or NULL on
   failure.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  /* The branch is taken on the false edge if the else-arm of the
     IF_THEN_ELSE is the jump label.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (GET_CODE (target) == CONST_INT)
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = PREV_INSN (if_info->cond_earliest);
      if (prev_insn
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  /* Prefer a REG_EQUAL/REG_EQUIV note over the SET_SRC itself.  */
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (GET_CODE (src) == CONST_INT)
	    {
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      /* Canonicalize: keep any constant in op_b.  */
	      if (GET_CODE (op_a) == CONST_INT)
		{
		  rtx tmp = op_a;
		  op_a = op_b;
		  op_b = tmp;
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  */
      if (GET_CODE (op_b) == CONST_INT)
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  /* Undo off-by-one canonicalizations, e.g. LT c+1 <-> LE c.  */
	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, false, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}

/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.
   Return TRUE and emit the min/max sequence before the jump on
   success, FALSE otherwise.  On success, also update if_info->cond
   and cond_earliest to the alternate condition that was used.  */

static int
noce_try_minmax (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq;
  enum rtx_code code, op;
  int unsignedp;

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
  if (no_new_pseudos)
    return FALSE;

  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
      || HONOR_NANS (GET_MODE (if_info->x)))
    return FALSE;

  /* We need a condition that mentions A itself (not a simplified
     equivalent) to recognize the min/max shape.  */
  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
	return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
	return FALSE;
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      op = SMAX;
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      op = SMIN;
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
      op = UMAX;
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
      op = UMIN;
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_binop (GET_MODE (if_info->x), op,
				if_info->a, if_info->b,
				if_info->x, unsignedp, OPTAB_WIDEN);
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  /* Record the condition actually used, for the caller's benefit.  */
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}

/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc.
   Also handles the negated (NABS) form.  Return TRUE and emit the
   sequence before the jump on success, FALSE otherwise.  On success,
   also update if_info->cond and cond_earliest.  */

static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq, a, b, c;
  int negate;

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
  if (no_new_pseudos)
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  After this,
     A is the negated arm and B the plain one; NEGATE records whether
     the final result must be negated (NABS).  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    c = XEXP (cond, 0);
  else
    return FALSE;

  /* Verify that C is zero.  Search backward through the block for
     a REG_EQUAL note if necessary.  */
  if (REG_P (c))
    {
      rtx insn, note = NULL;
      for (insn = earliest;
	   insn != BB_HEAD (if_info->test_bb);
	   insn = PREV_INSN (insn))
	if (INSN_P (insn)
	    && ((note = find_reg_note (insn, REG_EQUAL, c))
		|| (note = find_reg_note (insn, REG_EQUIV, c))))
	  break;
      if (! note)
	return FALSE;
      c = XEXP (note, 0);
    }
  /* A constant-pool reference counts as a constant too.  */
  if (MEM_P (c)
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  A "less than zero"
     test selecting the negated arm flips the sense of NEGATE.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  /* Record the condition actually used, for the caller's benefit.  */
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}

1747 1748 1749 1750 1751 1752 1753 1754 1755 1756 1757 1758 1759 1760 1761 1762 1763 1764 1765 1766 1767 1768 1769 1770 1771 1772 1773 1774 1775 1776 1777 1778 1779 1780 1781 1782 1783 1784 1785
/* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;".
   Return TRUE and emit the sequence before the jump on success,
   FALSE otherwise.  */

static int
noce_try_sign_mask (struct noce_if_info *if_info)
{
  rtx cond, t, m, c, seq;
  enum machine_mode mode;
  enum rtx_code code;

  if (no_new_pseudos)
    return FALSE;

  cond = if_info->cond;
  code = GET_CODE (cond);
  m = XEXP (cond, 0);
  c = XEXP (cond, 1);

  /* T becomes the value selected when M is negative; the other arm
     must be zero.  LT 0 and LE -1 are equivalent sign tests, as are
     GE 0 and GT -1.  */
  t = NULL_RTX;
  if (if_info->a == const0_rtx)
    {
      if ((code == LT && c == const0_rtx)
	  || (code == LE && c == constm1_rtx))
	t = if_info->b;
    }
  else if (if_info->b == const0_rtx)
    {
      if ((code == GE && c == const0_rtx)
	  || (code == GT && c == constm1_rtx))
	t = if_info->a;
    }

  if (! t || side_effects_p (t))
    return FALSE;

  /* We currently don't handle different modes.  */
  mode = GET_MODE (t);
  if (GET_MODE (m) != mode)
    return FALSE;

  /* This is only profitable if T is cheap, or T is unconditionally
     executed/evaluated in the original insn sequence.  */
  if (rtx_cost (t, SET) >= COSTS_N_INSNS (2)
      && (!if_info->b_unconditional
          || t != if_info->b))
    return FALSE;

  start_sequence ();
  /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
     "(signed) m >> 31" directly.  This benefits targets with specialized
     insns to obtain the signmask, but still uses ashr_optab otherwise.  */
  m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
  t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
	: NULL_RTX;

  if (!t)
    {
      end_sequence ();
      return FALSE;
    }

  noce_emit_move_insn (if_info->x, t);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;
}


1818 1819
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.  On success, *EARLIEST is
   set to the insn at which the condition becomes valid.  Return the
   condition, or NULL_RTX if JUMP is not a simple conditional jump or
   the condition cannot be recovered.  */

static rtx
noce_get_condition (rtx jump, rtx *earliest)
{
  rtx cond, set, tmp;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* If the condition variable is a register and is MODE_INT, accept it.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
    {
      /* The condition is already usable as-is; it is valid at the
	 jump itself.  */
      *earliest = jump;

      if (reverse)
	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
			       GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */
  return canonicalize_condition (jump, cond, reverse, earliest,
				 NULL_RTX, false, true);
}

1857 1858 1859
/* Return true if OP is ok for if-then-else processing.  */

static int
1860
noce_operand_ok (rtx op)
1861 1862 1863
{
  /* We special-case memories, so handle any of them with
     no address side effects.  */
1864
  if (MEM_P (op))
1865 1866 1867 1868 1869 1870 1871 1872
    return ! side_effects_p (XEXP (op, 0));

  if (side_effects_p (op))
    return FALSE;

  return ! may_trap_p (op);
}

Richard Henderson committed
1873 1874
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   without using conditional execution.  Return TRUE if we were
   successful at converting the block.

   CE_INFO describes the blocks found by find_if_block: the TEST block
   ending in a conditional jump, the THEN block, and optionally the
   ELSE block.  On success the blocks are merged via merge_if_block.  */

static int
noce_process_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  struct noce_if_info if_info;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;
  rtx jump, cond;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  /* If test is comprised of && or || elements, don't handle it unless it is
     the special case of && elements without an ELSE block.  */
  if (ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      /* Handle only the final test block; forget the earlier subtests.  */
      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &if_info.cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Look for one of the potential sets.  The THEN block must consist of
     exactly one active insn that is a single_set.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      /* Pattern (1): the ELSE block must also be a lone single_set
	 writing the same destination X.  */
      insn_b = first_active_insn (else_bb);
      if (! insn_b
	  || insn_b != last_active_insn (else_bb, FALSE)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      /* Pattern (2): look for a "x = b" immediately above the condition
	 evaluation.  */
      insn_b = prev_nonnote_insn (if_info.cond_earliest);
      /* We're going to be moving the evaluation of B down from above
	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
	 intact.  */
      if (! insn_b
	  || !NONJUMP_INSN_P (insn_b)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b))
	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
	  || modified_between_p (SET_SRC (set_b),
				 PREV_INSN (if_info.cond_earliest), jump)
	  /* Likewise with X.  In particular this can happen when
	     noce_get_condition looks farther back in the instruction
	     stream than one might expect.  */
	  || reg_overlap_mentioned_p (x, cond)
	  || reg_overlap_mentioned_p (x, a)
	  || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
	/* No usable "x = b"; fall back to pattern (3), the implicit
	   "else x = x".  */
	insn_b = set_b = NULL_RTX;
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at the end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  /* If x is a read-only memory, then the program is valid only if we
     avoid the store into it.  If there are stores on both the THEN and
     ELSE arms, then we can go ahead with the conversion; either the 
     program is broken, or the condition is always false such that the
     other memory is selected.  */
  if (!set_b && MEM_P (x) && MEM_READONLY_P (x))
    return FALSE;

  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (!REG_P (x)
      || (SMALL_REGISTER_CLASSES
	  && REGNO (x) < FIRST_PSEUDO_REGISTER))
    {
      /* Compute into a fresh pseudo and copy back to ORIG_X at the end
	 (see the "orig_x != x" fixup after the success label).  */
      if (no_new_pseudos || GET_MODE (x) == BLKmode)
	return FALSE;
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

  /* Set up the info block for our subroutines.  */
  if_info.test_bb = test_bb;
  if_info.cond = cond;
  if_info.jump = jump;
  if_info.insn_a = insn_a;
  if_info.insn_b = insn_b;
  if_info.x = x;
  if_info.a = a;
  if_info.b = b;
  if_info.b_unconditional = else_bb == 0;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  /* Keep the ELSE block's end marker valid while we pull INSN_B
	     out of it.  */
	  if (else_bb && insn_b == BB_END (else_bb))
	    BB_END (else_bb) = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL_RTX;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  /* Disallow the "if (...) x = a;" form (with an implicit "else x = x;")
     for most optimizations if writing to x may trap, i.e. it's a memory
     other than a static var or a stack slot.  */
  if (! set_b
      && MEM_P (orig_x)
      && ! MEM_NOTRAP_P (orig_x)
      && rtx_addr_can_trap_p (XEXP (orig_x, 0)))
    {
      /* A conditional move never performs the store on the untaken arm,
	 so it is the only transformation that stays safe here.  */
      if (HAVE_conditional_move)
	{
	  if (noce_try_cmove (&if_info))
	    goto success;
	  if (! HAVE_conditional_execution
	      && noce_try_cmove_arith (&if_info))
	    goto success;
	}
      return FALSE;
    }

  if (noce_try_move (&if_info))
    goto success;
  if (noce_try_store_flag (&if_info))
    goto success;
  if (noce_try_minmax (&if_info))
    goto success;
  if (noce_try_abs (&if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (&if_info))
    goto success;
  if (! HAVE_conditional_execution)
    {
      if (noce_try_store_flag_constants (&if_info))
	goto success;
      if (noce_try_addcc (&if_info))
	goto success;
      if (noce_try_store_flag_mask (&if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (&if_info))
	goto success;
      if (noce_try_sign_mask (&if_info))
	goto success;
    }

  return FALSE;

 success:
  /* The original sets may now be killed.  */
  delete_insn (insn_a);

  /* Several special cases here: First, we may have reused insn_b above,
     in which case insn_b is now NULL.  Second, we want to delete insn_b
     if it came from the ELSE block, because it follows the now correct
     write that appears in the TEST block.  However, if we got insn_b from
     the TEST block, it may in fact be loading data needed for the comparison.
     We'll let life_analysis remove the insn if it's really dead.  */
  if (insn_b && else_bb)
    delete_insn (insn_b);

  /* The new insns will have been inserted immediately before the jump.  We
     should be able to remove the jump with impunity, but the condition itself
     may have been modified by gcse to be shared across basic blocks.  */
  delete_insn (jump);

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      insn_b = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (insn_b);
      end_sequence ();

      emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
    }

  /* Merge the blocks!  */
  merge_if_block (ce_info);

  return TRUE;
}

/* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
   straight line code.  Return true if successful.  */

static int
2146
process_if_block (struct ce_if_block * ce_info)
Richard Henderson committed
2147 2148
{
  if (! reload_completed
2149
      && noce_process_if_block (ce_info))
Richard Henderson committed
2150 2151
    return TRUE;

2152 2153 2154 2155 2156 2157 2158 2159 2160 2161 2162 2163 2164 2165 2166 2167 2168
  if (HAVE_conditional_execution && reload_completed)
    {
      /* If we have && and || tests, try to first handle combining the && and
         || tests into the conditional code, and if that fails, go back and
         handle it without the && and ||, which at present handles the && case
         if there was no ELSE block.  */
      if (cond_exec_process_if_block (ce_info, TRUE))
	return TRUE;

      if (ce_info->num_multiple_test_blocks)
	{
	  cancel_changes (0);

	  if (cond_exec_process_if_block (ce_info, FALSE))
	    return TRUE;
	}
    }
Richard Henderson committed
2169 2170 2171 2172 2173 2174 2175

  return FALSE;
}

/* Merge the blocks and mark for local life update.

   Called after a successful conversion: folds the TEST, THEN, ELSE
   (and any && / || subtest) blocks of CE_INFO into a single combined
   block, and merges the JOIN block as well when it is adjacent and has
   no other predecessors.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;	/* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      /* Walk the fallthru chain, absorbing each subtest block.  */
      do
	{
	  bb = fallthru;
	  fallthru = block_fallthru (bb);
	  merge_blocks (combo_bb, bb);
	  num_true_changes++;
	}
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      /* Propagate the THEN block's liveness to the combined block.  */
      if (combo_bb->global_live_at_end)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      then_bb->global_live_at_end);
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (EDGE_COUNT (combo_bb->succs) == 0)
	/* No successors is only valid for a noreturn call or an
	   unconditional trap at the end of the block.  */
	gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
		    || (NONJUMP_INSN_P (last)
			&& GET_CODE (PATTERN (last)) == TRAP_IF
			&& (TRAP_CONDITION (PATTERN (last))
			    == const_true_rtx)));

      else
      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
	gcc_assert (JUMP_P (last)
		    || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
			&& CALL_P (last)
			&& SIBLING_CALL_P (last))
		    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
			&& can_throw_internal (last)));
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to a non-return function).  */
  else if (EDGE_COUNT (join_bb->preds) < 2
	   && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN.  */
      if (combo_bb->global_live_at_end)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      join_bb->global_live_at_end);

      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      gcc_assert (single_succ_p (combo_bb)
		  && single_succ (combo_bb) == join_bb);

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
	tidy_fallthru_edge (single_succ_edge (combo_bb));
    }

  num_updated_if_blocks++;
}

2289 2290 2291 2292
/* Find a block ending in a simple IF condition and try to transform it
   in some way.  When converting a multi-block condition, put the new code
   in the first such block and delete the rest.  Return a pointer to this
   first block if some transformation was done.  Return NULL otherwise.  */
Richard Henderson committed
2293

2294
static basic_block
2295
find_if_header (basic_block test_bb, int pass)
Richard Henderson committed
2296
{
2297
  ce_if_block_t ce_info;
Richard Henderson committed
2298 2299 2300 2301
  edge then_edge;
  edge else_edge;

  /* The kind of block we're looking for has exactly two successors.  */
2302
  if (EDGE_COUNT (test_bb->succs) != 2)
2303
    return NULL;
Richard Henderson committed
2304

2305 2306 2307
  then_edge = EDGE_SUCC (test_bb, 0);
  else_edge = EDGE_SUCC (test_bb, 1);

Richard Henderson committed
2308 2309 2310
  /* Neither edge should be abnormal.  */
  if ((then_edge->flags & EDGE_COMPLEX)
      || (else_edge->flags & EDGE_COMPLEX))
2311
    return NULL;
Richard Henderson committed
2312

2313 2314 2315 2316 2317
  /* Nor exit the loop.  */
  if ((then_edge->flags & EDGE_LOOP_EXIT)
      || (else_edge->flags & EDGE_LOOP_EXIT))
    return NULL;

Richard Henderson committed
2318 2319 2320 2321 2322 2323 2324 2325 2326 2327 2328
  /* The THEN edge is canonically the one that falls through.  */
  if (then_edge->flags & EDGE_FALLTHRU)
    ;
  else if (else_edge->flags & EDGE_FALLTHRU)
    {
      edge e = else_edge;
      else_edge = then_edge;
      then_edge = e;
    }
  else
    /* Otherwise this must be a multiway branch of some sort.  */
2329 2330
    return NULL;

2331
  memset (&ce_info, '\0', sizeof (ce_info));
2332 2333 2334 2335
  ce_info.test_bb = test_bb;
  ce_info.then_bb = then_edge->dest;
  ce_info.else_bb = else_edge->dest;
  ce_info.pass = pass;
Richard Henderson committed
2336

2337 2338 2339 2340 2341
#ifdef IFCVT_INIT_EXTRA_FIELDS
  IFCVT_INIT_EXTRA_FIELDS (&ce_info);
#endif

  if (find_if_block (&ce_info))
Richard Henderson committed
2342
    goto success;
2343

2344 2345 2346
  if (HAVE_trap && HAVE_conditional_trap
      && find_cond_trap (test_bb, then_edge, else_edge))
    goto success;
2347

2348
  if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY
Richard Henderson committed
2349 2350 2351 2352 2353 2354 2355 2356
      && (! HAVE_conditional_execution || reload_completed))
    {
      if (find_if_case_1 (test_bb, then_edge, else_edge))
	goto success;
      if (find_if_case_2 (test_bb, then_edge, else_edge))
	goto success;
    }

2357
  return NULL;
Richard Henderson committed
2358 2359

 success:
2360 2361
  if (dump_file)
    fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
2362 2363 2364 2365 2366 2367 2368 2369 2370
  return ce_info.test_bb;
}

/* Return true if a block has two edges, one of which falls through to the next
   block, and the other jumps to a specific block, so that we can tell if the
   block is part of an && test or an || test.  Returns either -1 or the number
   of non-note, non-jump, non-USE/CLOBBER insns in the block.  */

static int
2371
block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2372 2373 2374 2375 2376 2377 2378
{
  edge cur_edge;
  int fallthru_p = FALSE;
  int jump_p = FALSE;
  rtx insn;
  rtx end;
  int n_insns = 0;
2379
  edge_iterator ei;
2380 2381 2382 2383 2384

  if (!cur_bb || !target_bb)
    return -1;

  /* If no edges, obviously it doesn't jump or fallthru.  */
2385
  if (EDGE_COUNT (cur_bb->succs) == 0)
2386 2387
    return FALSE;

2388
  FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
2389 2390 2391 2392 2393 2394 2395 2396 2397 2398 2399 2400 2401 2402 2403 2404 2405 2406 2407 2408 2409 2410
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	/* Anything complex isn't what we want.  */
	return -1;

      else if (cur_edge->flags & EDGE_FALLTHRU)
	fallthru_p = TRUE;

      else if (cur_edge->dest == target_bb)
	jump_p = TRUE;

      else
	return -1;
    }

  if ((jump_p & fallthru_p) == 0)
    return -1;

  /* Don't allow calls in the block, since this is used to group && and ||
     together for conditional execution support.  ??? we should support
     conditional execution support across calls for IA-64 some day, but
     for now it makes the code simpler.  */
2411 2412
  end = BB_END (cur_bb);
  insn = BB_HEAD (cur_bb);
2413 2414 2415

  while (insn != NULL_RTX)
    {
2416
      if (CALL_P (insn))
2417 2418 2419
	return -1;

      if (INSN_P (insn)
2420
	  && !JUMP_P (insn)
2421 2422 2423 2424 2425 2426 2427 2428 2429 2430 2431
	  && GET_CODE (PATTERN (insn)) != USE
	  && GET_CODE (PATTERN (insn)) != CLOBBER)
	n_insns++;

      if (insn == end)
	break;

      insn = NEXT_INSN (insn);
    }

  return n_insns;
Richard Henderson committed
2432 2433 2434 2435
}

/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.

   On entry CE_INFO holds the TEST block and the destinations of its two
   outgoing edges (THEN and ELSE); this function identifies the JOIN block,
   discovers any preceding && / || subtest chain, and hands the completed
   description to process_if_block.  */

static int
find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge cur_edge;
  basic_block next;
  edge_iterator ei;

  ce_info->last_test_bb = test_bb;

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (HAVE_conditional_execution && reload_completed
      && single_pred_p (test_bb)
      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
    {
      basic_block bb = single_pred (test_bb);
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
	{
	  ce_info->and_and_p = TRUE;
	  target_bb = else_bb;
	}
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
	{
	  ce_info->and_and_p = FALSE;
	  target_bb = then_bb;
	}
      else
	target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
	{
	  int total_insns = 0;
	  int blocks = 0;

	  ce_info->last_test_bb = test_bb;

	  /* Found at least one && or || block, look for more.  Walk up
	     the single-predecessor fallthru chain as long as each block
	     jumps to the same target and the insn budget holds.  */
	  do
	    {
	      ce_info->test_bb = test_bb = bb;
	      total_insns += n_insns;
	      blocks++;

	      if (!single_pred_p (bb))
		break;

	      bb = single_pred (bb);
	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
	    }
	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

	  ce_info->num_multiple_test_blocks = blocks;
	  ce_info->num_multiple_test_insns = total_insns;

	  if (ce_info->and_and_p)
	    ce_info->num_and_and_blocks = blocks;
	  else
	    ce_info->num_or_or_blocks = blocks;
	}
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
    return FALSE;
    
  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (EDGE_COUNT (then_bb->succs) > 0
      && (!single_succ_p (then_bb)
          || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
	  || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (EDGE_COUNT (then_bb->succs) == 0)
    {
      if (single_pred_p (else_bb))
	{
	  rtx last_insn = BB_END (then_bb);

	  /* Skip trailing notes to find the real last insn.  */
	  while (last_insn
		 && NOTE_P (last_insn)
		 && last_insn != BB_HEAD (then_bb))
	    last_insn = PREV_INSN (last_insn);

	  if (last_insn
	      && JUMP_P (last_insn)
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (single_succ (then_bb) == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (single_succ_p (else_bb)
	   && single_succ (then_bb) == single_succ (else_bb)
	   && single_pred_p (else_bb)
	   && ! (single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
	   && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = single_succ (else_bb);

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s block found, pass %d, start block %d "
	       "[insn %d], then %d [%d]",
	       (else_bb) ? "-ELSE" : "",
	       ce_info->pass,
	       test_bb->index,
	       BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
	       then_bb->index,
	       BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
	fprintf (dump_file, ", else %d [%d]",
		 else_bb->index,
		 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
	       join_bb->index,
	       BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
	fprintf (dump_file, ", %d %s block%s last test %d [%d]",
		 ce_info->num_multiple_test_blocks,
		 (ce_info->and_and_p) ? "&&" : "||",
		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
		 ce_info->last_test_bb->index,
		 ((BB_HEAD (ce_info->last_test_bb))
		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
		  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than backing out the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      /* A non-adjacent JOIN is tolerable only in the IF-THEN-ELSE form;
	 merge_if_block then simply declines to merge it.  */
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */
  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  return process_if_block (ce_info);
}

2647 2648
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.

   TEST_BB ends in a conditional jump with successors THEN_EDGE and
   ELSE_EDGE; one of the two destinations must contain nothing but an
   unconditional trap.  Returns TRUE on success.  */

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  The branch jumps AROUND
     the trap when the condition selects THEN == trap_bb, so the trap must
     fire on the opposite condition.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, XEXP (cond, 0),
		       XEXP (cond, 1),
		       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  num_true_changes++;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  It may still have other
     predecessors, in which case only our edge is removed.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  if (EDGE_COUNT (trap_bb->preds) == 0)
    delete_basic_block (trap_bb);

  /* If the non-trap block and the test are now adjacent, merge them.
     Otherwise we must insert a direct branch.  */
  if (test_bb->next_bb == other_bb)
    {
      struct ce_if_block new_ce_info;
      delete_insn (jump);
      memset (&new_ce_info, '\0', sizeof (new_ce_info));
      new_ce_info.test_bb = test_bb;
      new_ce_info.then_bb = NULL;
      new_ce_info.else_bb = NULL;
      new_ce_info.join_bb = other_bb;
      merge_if_block (&new_ce_info);
    }
  else
    {
      rtx lab, newjump;

      /* Replace the conditional jump with an unconditional one to the
	 surviving block, keeping the label's use count accurate.  */
      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);

      delete_insn (jump);
    }

  return TRUE;
}

2745
/* Subroutine of find_cond_trap: if BB contains only a trap insn,
2746 2747 2748
   return it.  */

static rtx
2749
block_has_only_trap (basic_block bb)
2750 2751 2752 2753 2754 2755 2756 2757
{
  rtx trap;

  /* We're not the exit block.  */
  if (bb == EXIT_BLOCK_PTR)
    return NULL_RTX;

  /* The block must have no successors.  */
2758
  if (EDGE_COUNT (bb->succs) > 0)
2759 2760 2761 2762
    return NULL_RTX;

  /* The only instruction in the THEN block must be the trap.  */
  trap = first_active_insn (bb);
2763
  if (! (trap == BB_END (bb)
2764 2765 2766 2767 2768 2769 2770
	 && GET_CODE (PATTERN (trap)) == TRAP_IF
         && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
    return NULL_RTX;

  return trap;
}


/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
   transformable, but not necessarily the other.  There need be no
   JOIN block.

   Return TRUE if we were successful at converting the block.

   Cases we'd like to look at:

   (1)
	if (test) goto over; // x not live
	x = a;
	goto label;
	over:

   becomes

	x = a;
	if (! test) goto label;

   (2)
	if (test) goto E; // x not live
	x = big();
	goto L;
	E:
	x = b;
	goto M;

   becomes

	x = b;
	if (test) goto M;
	x = big();
	goto L;

   (3) // This one's really only interesting for targets that can do
       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
       // it results in multiple branches on a cache line, which often
       // does not sit well with predictors.

	if (test1) goto E; // predicted not taken
	x = a;
	if (test2) goto F;
	...
	E:
	x = b;
	J:

   becomes

	x = a;
	if (test1) goto E;
	if (test2) goto F;

   Notes:

   (A) Don't do (2) if the branch is predicted against the block we're
   eliminating.  Do it anyway if we can eliminate a branch; this requires
   that the sole successor of the eliminated block postdominate the other
   side of the if.

   (B) With CE, on (3) we can steal from both sides of the if, creating

	if (test1) x = a;
	if (!test1) x = b;
	if (test1) goto J;
	if (test2) goto F;
	...
	J:

   Again, this is most useful if J postdominates.

   (C) CE substitutes for helpful life information.

   (D) These heuristics need a lot of work.  */

/* Tests for case 1 above.  */

static int
2849
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
Richard Henderson committed
2850 2851
{
  basic_block then_bb = then_edge->dest;
2852
  basic_block else_bb = else_edge->dest, new_bb;
2853
  int then_bb_index;
Richard Henderson committed
2854

2855 2856
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
2857
     and cold sections.
2858
  
2859 2860 2861 2862 2863 2864
     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really 
     must be left untouched (they are required to make it safely across 
     partition boundaries).  See  the comments at the top of 
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

2865 2866 2867 2868 2869 2870 2871
  if ((BB_END (then_bb) 
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP, 
			    NULL_RTX)))
2872 2873
    return FALSE;

Richard Henderson committed
2874
  /* THEN has one successor.  */
2875
  if (!single_succ_p (then_bb))
Richard Henderson committed
2876 2877 2878
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
2879
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
Richard Henderson committed
2880 2881 2882
    return FALSE;

  /* THEN has one predecessor.  */
2883
  if (!single_pred_p (then_bb))
Richard Henderson committed
2884 2885
    return FALSE;

2886 2887
  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
Richard Henderson committed
2888 2889 2890
    return FALSE;

  num_possible_if_blocks++;
2891 2892
  if (dump_file)
    fprintf (dump_file,
Richard Henderson committed
2893
	     "\nIF-CASE-1 found, start %d, then %d\n",
2894
	     test_bb->index, then_bb->index);
Richard Henderson committed
2895 2896

  /* THEN is small.  */
2897
  if (! cheap_bb_rtx_cost_p (then_bb, COSTS_N_INSNS (BRANCH_COST)))
Richard Henderson committed
2898 2899 2900
    return FALSE;

  /* Registers set are dead, or are predicable.  */
2901
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
2902
			    single_succ (then_bb), 1))
Richard Henderson committed
2903 2904 2905 2906 2907
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

2908 2909 2910
  bitmap_ior (test_bb->global_live_at_end,
	      else_bb->global_live_at_start,
	      then_bb->global_live_at_end);
2911

2912 2913 2914 2915 2916 2917 2918 2919 2920 2921 2922 2923 2924 2925 2926 2927

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     thru to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR)
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
                                             else_bb);

2928
  then_bb_index = then_bb->index;
2929
  delete_basic_block (then_bb);
2930

2931
  /* Make rest of code believe that the newly created block is the THEN_BB
2932
     block we removed.  */
2933
  if (new_bb)
2934 2935 2936
    {
      new_bb->index = then_bb_index;
      BASIC_BLOCK (then_bb_index) = new_bb;
2937 2938 2939
      /* Since the fallthru edge was redirected from test_bb to new_bb,
         we need to ensure that new_bb is in the same partition as
         test bb (you can not fall through across section boundaries).  */
2940
      BB_COPY_PARTITION (new_bb, test_bb);
2941
    }
2942 2943
  /* We've possibly created jump to next insn, cleanup_cfg will solve that
     later.  */
Richard Henderson committed
2944

2945
  num_true_changes++;
Richard Henderson committed
2946 2947 2948 2949 2950 2951 2952 2953
  num_updated_if_blocks++;

  return TRUE;
}

/* Test for case 2 above.  */

static int
2954
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
Richard Henderson committed
2955 2956 2957
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
2958
  edge else_succ;
2959
  rtx note;
Richard Henderson committed
2960

2961 2962
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
2963
     and cold sections.
2964
  
2965 2966 2967 2968 2969 2970
     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really 
     must be left untouched (they are required to make it safely across 
     partition boundaries).  See  the comments at the top of 
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

2971 2972 2973 2974 2975 2976 2977
  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb) 
	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP, 
			    NULL_RTX)))
2978 2979
    return FALSE;

Richard Henderson committed
2980
  /* ELSE has one successor.  */
2981
  if (!single_succ_p (else_bb))
Richard Henderson committed
2982
    return FALSE;
2983
  else
2984
    else_succ = single_succ_edge (else_bb);
Richard Henderson committed
2985 2986 2987 2988 2989 2990

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
2991
  if (!single_pred_p (else_bb))
Richard Henderson committed
2992 2993
    return FALSE;

2994
  /* THEN is not EXIT.  */
2995
  if (then_bb->index < 0)
2996 2997
    return FALSE;

Richard Henderson committed
2998
  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
2999
  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
Richard Henderson committed
3000 3001
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
    ;
3002
  else if (else_succ->dest->index < 0
3003
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
3004
			      else_succ->dest))
Richard Henderson committed
3005 3006 3007 3008 3009
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
3010 3011
  if (dump_file)
    fprintf (dump_file,
Richard Henderson committed
3012
	     "\nIF-CASE-2 found, start %d, else %d\n",
3013
	     test_bb->index, else_bb->index);
Richard Henderson committed
3014 3015

  /* ELSE is small.  */
3016
  if (! cheap_bb_rtx_cost_p (else_bb, COSTS_N_INSNS (BRANCH_COST)))
Richard Henderson committed
3017 3018 3019
    return FALSE;

  /* Registers set are dead, or are predicable.  */
3020
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
Richard Henderson committed
3021 3022 3023 3024 3025
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

3026 3027 3028
  bitmap_ior (test_bb->global_live_at_end,
	      then_bb->global_live_at_start,
	      else_bb->global_live_at_end);
3029

3030
  delete_basic_block (else_bb);
Richard Henderson committed
3031

3032
  num_true_changes++;
Richard Henderson committed
3033 3034 3035 3036 3037 3038 3039 3040 3041 3042 3043 3044
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}

/* A subroutine of dead_or_predicable called through for_each_rtx.
   Return 1 if a memory is found.  */

static int
3045
find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
Richard Henderson committed
3046
{
3047
  return MEM_P (*px);
Richard Henderson committed
3048 3049 3050 3051 3052 3053 3054 3055 3056 3057 3058
}

/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  OTHER_BB is the
   block on the opposite side of the conditional.  NEW_DEST is the
   label TEST_BB should be branching to after the conversion.
   REVERSEP is true if the sense of the branch should be reversed.

   On success the insns of MERGE_BB are moved before the branch in
   TEST_BB and the branch is retargeted (or deleted entirely when
   OTHER_BB == NEW_DEST).  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, basic_block new_dest, int reversep)
{
  rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block: skip a
     leading label and block-beginning note, and back off over a
     trailing jump.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  if (LABEL_P (head))
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
	{
	  /* Nothing but a label/note: no insns to move at all.  */
	  head = end = NULL_RTX;
	  goto no_body;
	}
      head = NEXT_INSN (head);
    }

  if (JUMP_P (end))
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (HAVE_conditional_execution)
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      /* Branch probability, if recorded, so the predicated insns can
	 carry an adjusted probability.  */
      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
	prob_val = XEXP (prob_val, 0);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
			         XEXP (cond, 1));
	  /* Reversing the branch also reverses the taken probability.  */
	  if (prob_val)
	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
	}

      if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
				     prob_val, 0))
	goto cancel;

      earliest = jump;
    }
  else
#endif
    {
      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      regset merge_set, tmp, test_live, test_set;
      struct propagate_block_info *pbi;
      unsigned i, fail = 0;
      bitmap_iterator bi;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
	{
	  if (CALL_P (insn))
	    return FALSE;
	  if (INSN_P (insn))
	    {
	      if (may_trap_p (PATTERN (insn)))
		return FALSE;

	      /* ??? Even non-trapping memories such as stack frame
		 references must be avoided.  For stores, we collect
		 no lifetime info; for reads, we'd have to assert
		 true_dependence false against every store in the
		 TEST range.  */
	      if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
		return FALSE;
	    }
	  if (insn == end)
	    break;
	}

      if (! any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest);
      if (! cond)
	return FALSE;

      /* Collect:
	   MERGE_SET = set of registers set in MERGE_BB
	   TEST_LIVE = set of registers live at EARLIEST
	   TEST_SET  = set of registers set between EARLIEST and the
		       end of the block.  */

      tmp = ALLOC_REG_SET (&reg_obstack);
      merge_set = ALLOC_REG_SET (&reg_obstack);
      test_live = ALLOC_REG_SET (&reg_obstack);
      test_set = ALLOC_REG_SET (&reg_obstack);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
	 so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
         since we've already asserted that MERGE_BB is small.  */
      propagate_block (merge_bb, tmp, merge_set, merge_set, 0);

      /* For small register class machines, don't lengthen lifetimes of
	 hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
	{
          EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
	    {
	      /* Any non-fixed, non-global hard register in MERGE_SET
		 would have its lifetime stretched — refuse.  */
	      if (i < FIRST_PSEUDO_REGISTER
		  && ! fixed_regs[i]
		  && ! global_regs[i])
		fail = 1;
	    }
	}

      /* For TEST, we're interested in a range of insns, not a whole block.
	 Moreover, we're interested in the insns live from OTHER_BB.  */

      COPY_REG_SET (test_live, other_bb->global_live_at_start);
      pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
				       0);

      /* Walk backward from the jump to EARLIEST, accumulating liveness
	 and the set of registers written in that range.  */
      for (insn = jump; ; insn = prev)
	{
	  prev = propagate_one_insn (pbi, insn);
	  if (insn == earliest)
	    break;
	}

      free_propagate_block_info (pbi);

      /* We can perform the transformation if
	   MERGE_SET & (TEST_SET | TEST_LIVE)
	 and
	   TEST_SET & merge_bb->global_live_at_start
	 are empty.  */

      if (bitmap_intersect_p (test_set, merge_set)
	  || bitmap_intersect_p (test_live, merge_set)
	  || bitmap_intersect_p (test_set, merge_bb->global_live_at_start))
	fail = 1;

      FREE_REG_SET (tmp);
      FREE_REG_SET (merge_set);
      FREE_REG_SET (test_live);
      FREE_REG_SET (test_set);

      if (fail)
	return FALSE;
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
	  ? ! invert_jump_1 (jump, new_label)
	  : ! redirect_jump_1 (jump, new_label))
	goto cancel;
    }

  if (! apply_change_group ())
    return FALSE;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (jump, old_dest, new_label, -1, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  /* The branch and fallthru outcomes swapped, so swap their
	     counts and probabilities too.  */
	  gcov_type count, probability;
	  count = BRANCH_EDGE (test_bb)->count;
	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
	  FALLTHRU_EDGE (test_bb)->count = count;
	  probability = BRANCH_EDGE (test_bb)->probability;
	  BRANCH_EDGE (test_bb)->probability
	    = FALLTHRU_EDGE (test_bb)->probability;
	  FALLTHRU_EDGE (test_bb)->probability = probability;
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      /* NOTE(review): squeeze_notes returning nonzero appears to mean
	 the range became empty of real insns; we then skip the move
	 but still report success — confirm against its definition.  */
      if (squeeze_notes (&head, &end))
	return TRUE;

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);
  return FALSE;
}

/* Main entry point for all if-conversion.  */

void
3306
if_convert (int x_life_data_ok)
Richard Henderson committed
3307
{
3308
  basic_block bb;
3309
  int pass;
Richard Henderson committed
3310 3311 3312

  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
3313
  num_true_changes = 0;
3314
  life_data_ok = (x_life_data_ok != 0);
Richard Henderson committed
3315

3316
  if ((! targetm.cannot_modify_jumps_p ())
3317 3318
      && (!flag_reorder_blocks_and_partition || !no_new_pseudos
	  || !targetm.have_named_sections))
3319 3320 3321 3322 3323 3324 3325 3326
    {
      struct loops loops;

      flow_loops_find (&loops);
      mark_loop_exit_edges (&loops);
      flow_loops_free (&loops);
      free_dominance_info (CDI_DOMINATORS);
    }
3327

Richard Henderson committed
3328 3329
  /* Compute postdominators if we think we'll use them.  */
  if (HAVE_conditional_execution || life_data_ok)
3330 3331
    calculate_dominance_info (CDI_POST_DOMINATORS);

3332 3333
  if (life_data_ok)
    clear_bb_flags ();
Richard Henderson committed
3334

3335 3336 3337 3338 3339 3340 3341 3342 3343 3344
  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
3345 3346
      if (dump_file && pass > 1)
	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
3347 3348 3349 3350
#endif

      FOR_EACH_BB (bb)
	{
3351 3352
	  basic_block new_bb;
	  while ((new_bb = find_if_header (bb, pass)))
3353 3354 3355 3356
	    bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
3357 3358
      if (dump_file && cond_exec_changed_p)
	print_rtl_with_bb (dump_file, get_insns ());
3359 3360 3361 3362 3363
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
3364 3365
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
3366
#endif
Richard Henderson committed
3367

3368
  free_dominance_info (CDI_POST_DOMINATORS);
Richard Henderson committed
3369

3370 3371
  if (dump_file)
    fflush (dump_file);
Richard Henderson committed
3372

3373 3374
  clear_aux_for_blocks ();

Richard Henderson committed
3375
  /* Rebuild life info for basic blocks that require it.  */
3376
  if (num_true_changes && life_data_ok)
Richard Henderson committed
3377 3378 3379 3380 3381 3382 3383
    {
      /* If we allocated new pseudos, we must resize the array for sched1.  */
      if (max_regno < max_reg_num ())
	{
	  max_regno = max_reg_num ();
	  allocate_reg_info (max_regno, FALSE, FALSE);
	}
3384 3385 3386
      update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
					PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
					| PROP_KILL_DEAD_CODE);
Richard Henderson committed
3387 3388 3389
    }

  /* Write the final stats.  */
3390
  if (dump_file && num_possible_if_blocks > 0)
Richard Henderson committed
3391
    {
3392
      fprintf (dump_file,
Richard Henderson committed
3393 3394
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
3395
      fprintf (dump_file,
Richard Henderson committed
3396 3397
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
3398
      fprintf (dump_file,
3399 3400
	       "%d true changes made.\n\n\n",
	       num_true_changes);
Richard Henderson committed
3401 3402
    }

3403
#ifdef ENABLE_CHECKING
3404
  verify_flow_info ();
3405
#endif
Richard Henderson committed
3406
}