ifcvt.c 122 KB
Newer Older
Richard Henderson committed
1
/* If-conversion support.
2 3
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010,
   2011
4
   Free Software Foundation, Inc.
Richard Henderson committed
5

6
   This file is part of GCC.
Richard Henderson committed
7

8 9
   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
10
   the Free Software Foundation; either version 3, or (at your option)
Richard Henderson committed
11 12
   any later version.

13 14 15 16
   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.
Richard Henderson committed
17 18

   You should have received a copy of the GNU General Public License
19 20
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
Richard Henderson committed
21 22 23

#include "config.h"
#include "system.h"
24 25
#include "coretypes.h"
#include "tm.h"
Richard Henderson committed
26 27 28 29 30 31 32

#include "rtl.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
33
#include "except.h"
34
#include "hard-reg-set.h"
Richard Henderson committed
35 36 37
#include "basic-block.h"
#include "expr.h"
#include "output.h"
38
#include "optabs.h"
39
#include "diagnostic-core.h"
Richard Henderson committed
40
#include "tm_p.h"
41
#include "cfgloop.h"
42
#include "target.h"
43 44
#include "timevar.h"
#include "tree-pass.h"
45
#include "df.h"
46 47
#include "vec.h"
#include "vecprim.h"
48
#include "dbgcnt.h"
Richard Henderson committed
49 50 51 52 53 54 55 56 57 58

#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
59 60 61
#ifndef HAVE_trap
#define HAVE_trap 0
#endif
Richard Henderson committed
62 63

#ifndef MAX_CONDITIONAL_EXECUTE
64 65 66
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
Richard Henderson committed
67 68
#endif

69 70
#define IFCVT_MULTIPLE_DUMPS 1

71
#define NULL_BLOCK	((basic_block) NULL)
Richard Henderson committed
/* Pass-wide statistics, reset per invocation and reported in the dump.  */

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;
Richard Henderson committed
86
/* Forward references.  */
87 88
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int);
89 90
static rtx first_active_insn (basic_block);
static rtx last_active_insn (basic_block, int);
91 92
static rtx find_active_insn_before (basic_block, rtx);
static rtx find_active_insn_after (basic_block, rtx);
93 94 95
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
static rtx cond_exec_get_condition (rtx);
96
static rtx noce_get_condition (rtx, rtx *, bool);
97
static int noce_operand_ok (const_rtx);
98 99 100 101
static void merge_if_block (ce_if_block_t *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
102 103
static int noce_find_if_block (basic_block, edge, edge, int);
static int cond_exec_find_if_block (ce_if_block_t *);
104 105 106 107 108 109
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int dead_or_predicable (basic_block, basic_block, basic_block,
			       basic_block, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx block_has_only_trap (basic_block);
Richard Henderson committed
110 111 112 113

/* Count the number of non-jump active insns in BB.  */

static int
114
count_bb_insns (const_basic_block bb)
Richard Henderson committed
115 116
{
  int count = 0;
117
  rtx insn = BB_HEAD (bb);
Richard Henderson committed
118 119 120

  while (1)
    {
121
      if (CALL_P (insn) || NONJUMP_INSN_P (insn))
Richard Henderson committed
122 123
	count++;

124
      if (insn == BB_END (bb))
Richard Henderson committed
125 126 127 128 129 130 131
	break;
      insn = NEXT_INSN (insn);
    }

  return count;
}

/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.  */

static bool
cheap_bb_rtx_cost_p (const_basic_block bb, int max_cost)
{
  int count = 0;
  rtx insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
	{
	  int cost = insn_rtx_cost (PATTERN (insn), speed);
	  /* insn_rtx_cost returns 0 when it cannot estimate the cost.  */
	  if (cost == 0)
	    return false;

	  /* If this instruction is the load or set of a "stack" register,
	     such as a floating point register on x87, then the cost of
	     speculatively executing this insn may need to include
	     the additional cost of popping its result off of the
	     register stack.  Unfortunately, correctly recognizing and
	     accounting for this additional overhead is tricky, so for
	     now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
	  {
	    rtx set = single_set (insn);
	    if (set && STACK_REG_P (SET_DEST (set)))
	      return false;
	  }
#endif

	  count += cost;
	  if (count >= max_cost)
	    return false;
	}
      /* Calls are never considered cheap to speculate.  */
      else if (CALL_P (insn))
	return false;

      if (insn == BB_END (bb))
	break;
      insn = NEXT_INSN (insn);
    }

  return true;
}

/* Return the first non-jump active insn in the basic block.
   Returns NULL_RTX if the block contains no active insn.  */

static rtx
first_active_insn (basic_block bb)
{
  rtx insn = BB_HEAD (bb);

  /* A label can only appear at the head of the block; step over it.  */
  if (LABEL_P (insn))
    {
      if (insn == BB_END (bb))
	return NULL_RTX;
      insn = NEXT_INSN (insn);
    }

  /* Notes and debug insns are not "active".  */
  while (NOTE_P (insn) || DEBUG_INSN_P (insn))
    {
      if (insn == BB_END (bb))
	return NULL_RTX;
      insn = NEXT_INSN (insn);
    }

  /* A jump ends the block; there is no active insn if we reached it.  */
  if (JUMP_P (insn))
    return NULL_RTX;

  return insn;
}

/* Return the last active (non-jump) insn in the basic block, or
   NULL_RTX if there is none.  If SKIP_USE_P, trailing USE insns are
   also skipped.  */

static rtx
last_active_insn (basic_block bb, int skip_use_p)
{
  rtx insn = BB_END (bb);
  rtx head = BB_HEAD (bb);

  /* Walk backwards over notes, jumps, debug insns, and (optionally)
     USE insns.  */
  while (NOTE_P (insn)
	 || JUMP_P (insn)
	 || DEBUG_INSN_P (insn)
	 || (skip_use_p
	     && NONJUMP_INSN_P (insn)
	     && GET_CODE (PATTERN (insn)) == USE))
    {
      if (insn == head)
	return NULL_RTX;
      insn = PREV_INSN (insn);
    }

  /* Reaching the block's label means no active insn exists.  */
  if (LABEL_P (insn))
    return NULL_RTX;

  return insn;
}
233

234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275
/* Return the active insn before INSN inside basic block CURR_BB. */

static rtx
find_active_insn_before (basic_block curr_bb, rtx insn)
{
  if (!insn || insn == BB_HEAD (curr_bb))
    return NULL_RTX;

  while ((insn = PREV_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the start of the basic block. */
      if (insn == BB_HEAD (curr_bb))
        return NULL_RTX;
    }

  return insn;
}

/* Return the active insn after INSN inside basic block CURR_BB. */

static rtx
find_active_insn_after (basic_block curr_bb, rtx insn)
{
  rtx next;

  /* Nothing can follow the end of the block.  */
  if (insn == NULL_RTX || insn == BB_END (curr_bb))
    return NULL_RTX;

  for (next = NEXT_INSN (insn); next != NULL_RTX; next = NEXT_INSN (next))
    {
      if (CALL_P (next) || JUMP_P (next) || NONJUMP_INSN_P (next))
	return next;

      /* No other active insn all the way to the end of the basic block. */
      if (next == BB_END (curr_bb))
	return NULL_RTX;
    }

  return next;
}

276
/* Return the basic block reached by falling though the basic block BB.  */
277 278

static basic_block
279
block_fallthru (basic_block bb)
280
{
281
  edge e = find_fallthru_edge (bb->succs);
282 283 284

  return (e) ? e->dest : NULL_BLOCK;
}
Richard Henderson committed
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */

static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
			 /* if block information */rtx start,
			 /* first insn to look at */rtx end,
			 /* last insn to look at */rtx test,
			 /* conditional execution test */rtx prob_val,
			 /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* dwarf2out can't cope with conditional prologues.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
	return FALSE;

      /* Notes and debug insns need no conversion.  */
      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
	goto insn_done;

      gcc_assert(NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.  */
	  SET_INSN_DELETED (insn);
	  goto insn_done;
	}

      /* Last insn wasn't last?  */
      if (must_be_last)
	return FALSE;

      /* An insn that modifies the test may only appear last, and only
	 when the caller allows it (MOD_OK).  */
      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      /* Queue the change; it is applied (or cancelled) by the caller via
	 apply_change_group / cancel_changes.  */
      validate_change (insn, &PATTERN (insn), pattern, 1);

      if (CALL_P (insn) && prob_val)
	validate_change (insn, &REG_NOTES (insn),
			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
					  REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}

/* Return the condition for a jump.  Do not do any special processing.
   Returns NULL_RTX if JUMP is not a simple conditional jump or if the
   condition cannot be reversed when that is required.  */

static rtx
cond_exec_get_condition (rtx jump)
{
  rtx test_if, cond;

  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
	return NULL_RTX;

      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
			     XEXP (cond, 1));
    }

  return cond;
}

/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
			    /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert.  */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  rtx true_prob_val;		/* probability of else block */
  rtx false_prob_val;		/* probability of then block */
  rtx then_last_head = NULL_RTX;	/* Last match at the head of THEN */
  rtx else_last_head = NULL_RTX;	/* Last match at the head of ELSE */
  rtx then_first_tail = NULL_RTX;	/* First match at the tail of THEN */
  rtx else_first_tail = NULL_RTX;	/* First match at the tail of ELSE */
  int then_n_insns, else_n_insns, n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  n_insns = then_n_insns;
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      int n_matching;

      /* With both a THEN and an ELSE arm, allow twice as many insns.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
      n_insns += else_n_insns;

      /* Look for matching sequences at the head and tail of the two blocks,
	 and limit the range of insns to be converted if possible.  */
      n_matching = flow_find_cross_jump (then_bb, else_bb,
					 &then_first_tail, &else_first_tail,
					 NULL);
      if (then_first_tail == BB_HEAD (then_bb))
	then_start = then_end = NULL_RTX;
      if (else_first_tail == BB_HEAD (else_bb))
	else_start = else_end = NULL_RTX;

      if (n_matching > 0)
	{
	  if (then_end)
	    then_end = find_active_insn_before (then_bb, then_first_tail);
	  if (else_end)
	    else_end = find_active_insn_before (else_bb, else_first_tail);
	  n_insns -= 2 * n_matching;
	}

      if (then_start && else_start)
	{
	  int longest_match = MIN (then_n_insns - n_matching,
				   else_n_insns - n_matching);
	  n_matching
	    = flow_find_head_matching_sequence (then_bb, else_bb,
						&then_last_head,
						&else_last_head,
						longest_match);

	  if (n_matching > 0)
	    {
	      rtx insn;

	      /* We won't pass the insns in the head sequence to
		 cond_exec_process_insns, so we need to test them here
		 to make sure that they don't clobber the condition.  */
	      for (insn = BB_HEAD (then_bb);
		   insn != NEXT_INSN (then_last_head);
		   insn = NEXT_INSN (insn))
		if (!LABEL_P (insn) && !NOTE_P (insn)
		    && !DEBUG_INSN_P (insn)
		    && modified_in_p (test_expr, insn))
		  return FALSE;
	    }

	  if (then_last_head == then_end)
	    then_start = then_end = NULL_RTX;
	  if (else_last_head == else_end)
	    else_start = else_end = NULL_RTX;

	  if (n_matching > 0)
	    {
	      if (then_start)
		then_start = find_active_insn_after (then_bb, then_last_head);
	      if (else_start)
		else_start = find_active_insn_after (else_bb, else_last_head);
	      n_insns -= 2 * n_matching;
	    }
	}
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
	goto fail;

      do
	{
	  rtx start, end;
	  rtx t, f;
	  enum rtx_code f_code;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump, then
	     we can not do conditional execution conversion on this block.  */
	  if (! onlyjump_p (BB_END (bb)))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (BB_END (bb));
	  if (! t)
	    goto fail;

	  f_code = reversed_comparison_code (t, BB_END (bb));
	  if (f_code == UNKNOWN)
	    goto fail;

	  f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it can
	     do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed.  */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on then THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  If we had matching sequences, make sure to delete one
     copy at the appropriate location first: delete the copy in the THEN branch
     for a tail sequence so that the remaining one is executed last for both
     branches, and delete the copy in the ELSE branch for a head sequence so
     that the remaining one is executed first for both branches.  */
  if (then_first_tail)
    {
      rtx from = then_first_tail;
      if (!INSN_P (from))
	from = find_active_insn_after (then_bb, from);
      delete_insn_chain (from, BB_END (then_bb), false);
    }
  if (else_last_head)
    delete_insn_chain (first_active_insn (else_bb), else_last_head, false);

  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}

/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just this
     one insns in those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx insn_a, insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};

756
static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
757
static int noce_try_move (struct noce_if_info *);
758 759 760 761 762 763 764 765 766 767 768
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
			    rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
769
static int noce_try_sign_mask (struct noce_if_info *);
Richard Henderson committed
770 771 772 773

/* Helper function for noce_try_store_flag*.  */

static rtx
774 775
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
Richard Henderson committed
776 777 778 779 780 781 782 783 784 785 786 787
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
788 789 790 791 792 793 794 795 796
    {
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump))
	reversep = !reversep;
      if (if_info->then_else_reversed)
	reversep = !reversep;
    }
Richard Henderson committed
797

798 799 800 801 802
  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

Richard Henderson committed
803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818
  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
			    XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
819
	  emit_insn (tmp);
Richard Henderson committed
820 821 822 823 824 825 826 827 828

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

829 830
  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
Richard Henderson committed
831 832 833 834 835 836 837 838
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}

/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  enum machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx seq, insn, target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
	 otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
	     ? emit_move_insn (x, y)
	     : emit_insn (gen_rtx_SET (VOIDmode, x, y));
      seq = get_insns ();
      end_sequence ();

      /* If the move was not recognized, try harder below.  */
      if (recog_memoized (insn) <= 0)
	{
	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      rtx op = XEXP (x, 0);
	      unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
	      unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

	      /* store_bit_field expects START to be relative to
		 BYTES_BIG_ENDIAN and adjusts this value for machines with
		 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
		 invoke store_bit_field again it is necessary to have the START
		 value from the first call.  */
	      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
		{
		  if (MEM_P (op))
		    start = BITS_PER_UNIT - start - size;
		  else
		    {
		      gcc_assert (REG_P (op));
		      start = BITS_PER_WORD - start - size;
		    }
		}

	      gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
	      store_bit_field (op, size, start, GET_MODE (x), y);
	      return;
	    }

	  /* Fall back to expanding Y through the optab machinery with X
	     as the target.  */
	  switch (GET_RTX_CLASS (GET_CODE (y)))
	    {
	    case RTX_UNARY:
	      ot = code_to_optab[GET_CODE (y)];
	      if (ot)
		{
		  start_sequence ();
		  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    case RTX_BIN_ARITH:
	    case RTX_COMM_ARITH:
	      ot = code_to_optab[GET_CODE (y)];
	      if (ot)
		{
		  start_sequence ();
		  target = expand_binop (GET_MODE (y), ot,
					 XEXP (y, 0), XEXP (y, 1),
					 x, 0, OPTAB_DIRECT);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			  emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    default:
	      break;
	    }
	}

      emit_insn (seq);
      return;
    }

  /* STRICT_LOW_PART: store into the low part of INNER only.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
}

945 946 947 948 949 950 951
/* Return sequence of instructions generated by if conversion.  This
   function calls end_sequence() to end the current stream, ensures
   that all instructions are unshared, recognizable non-jump insns.
   On failure, this function returns a NULL_RTX.  */

static rtx
end_ifcvt_sequence (struct noce_if_info *if_info)
{
  rtx insn;
  rtx seq = get_insns ();

  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
     As an exercise for the reader, build a general mechanism that
     allows proper placement of required clobbers.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	|| recog_memoized (insn) == -1)
      return NULL_RTX;

  return seq;
}

973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005
/* Convert "if (a != b) x = a; else x = b" into "x = a" and
   "if (a == b) x = a; else x = b" into "x = b".  */

static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  int operands_match;
  rtx src, seq;

  if (code != NE && code != EQ)
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (GET_MODE (if_info->x))
      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

  /* The operands of the comparison must be A and B, in either order.  */
  operands_match
    = ((rtx_equal_p (if_info->a, XEXP (cond, 0))
	&& rtx_equal_p (if_info->b, XEXP (cond, 1)))
       || (rtx_equal_p (if_info->a, XEXP (cond, 1))
	   && rtx_equal_p (if_info->b, XEXP (cond, 0))));
  if (!operands_match)
    return FALSE;

  src = (code == EQ) ? if_info->a : if_info->b;

  /* Avoid generating the move if the source is the destination.  */
  if (!rtx_equal_p (if_info->x, src))
    {
      start_sequence ();
      noce_emit_move_insn (if_info->x, src);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
    }
  return TRUE;
}

Richard Henderson committed
1018 1019 1020 1021 1022 1023 1024
/* Convert "if (test) x = 1; else x = 0".

   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
   tried in noce_try_store_flag_constants after noce_try_cmove has had
   a go at the conversion.  */

static int
1025
noce_try_store_flag (struct noce_if_info *if_info)
Richard Henderson committed
1026 1027 1028 1029
{
  int reversep;
  rtx target, seq;

Shujing Zhao committed
1030
  if (CONST_INT_P (if_info->b)
Richard Henderson committed
1031 1032 1033 1034
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
Shujing Zhao committed
1035
	   && CONST_INT_P (if_info->a)
Richard Henderson committed
1036
	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
1037 1038
	   && (reversed_comparison_code (if_info->cond, if_info->jump)
	       != UNKNOWN))
Richard Henderson committed
1039 1040 1041 1042 1043 1044 1045 1046 1047 1048
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
1049
	noce_emit_move_insn (if_info->x, target);
Richard Henderson committed
1050

1051 1052 1053
      seq = end_ifcvt_sequence (if_info);
      if (! seq)
	return FALSE;
Richard Henderson committed
1054

1055 1056
      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
Richard Henderson committed
1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}

/* Convert "if (test) x = a; else x = b", for A and B constant.
   Emits a store-flag of the (possibly reversed) condition and then
   massages the 0 / STORE_FLAG_VALUE result into A or B with one or
   two additional arithmetic/logical operations.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  enum machine_mode mode;

  if (CONST_INT_P (if_info->a)
      && CONST_INT_P (if_info->b))
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      /* Make sure we can represent the difference between the two values.
	 The check compares the sign of the subtraction result with the
	 sign it ought to have, detecting signed overflow of
	 ITRUE - IFALSE without performing an overflowing operation on
	 the result itself.  */
      if ((itrue - ifalse > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (itrue - ifalse, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      /* Decide how the store-flag result should be normalized (0/1 or
	 0/-1) and whether the condition must be reversed, so that one
	 of the cheap transforms below applies.  The branch_cost checks
	 gate the transforms that need an extra instruction.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || if_info->branch_cost >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || if_info->branch_cost >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
	normalize = -1, reversep = 1;
      else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
	       || if_info->branch_cost >= 3)
	normalize = -1;
      else
	return FALSE;

      /* When reversing the condition, also exchange the constants and
	 negate their difference.  */
      if (reversep)
	{
	  tmp = itrue; itrue = ifalse; ifalse = tmp;
	  diff = trunc_int_for_mode (-diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					GEN_INT (ifalse), target, if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, GEN_INT (ifalse), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, GEN_INT (diff), if_info->x, 0,
					OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, GEN_INT (ifalse),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}

1194
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target, seq;
  int subtract, normalize;

  /* A must have the form (plus B V) -- i.e. the THEN value is the
     ELSE value plus some addend -- and the condition must be
     reversible, since we emit code for the negated test.  */
  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  Only applies to an addend of +1 or -1, and only when a
	 branch costs at least two instructions.  */
      if (if_info->branch_cost >= 2
	  && (XEXP (if_info->a, 1) == const1_rtx
	      || XEXP (if_info->a, 1) == constm1_rtx))
        {
	  start_sequence ();
	  /* Pick add vs. subtract of the raw store-flag result, or a
	     normalized 0/+-1 flag, depending on STORE_FLAG_VALUE.  */
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}

/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

static int
1288
noce_try_store_flag_mask (struct noce_if_info *if_info)
Richard Henderson committed
1289 1290 1291 1292 1293
{
  rtx target, seq;
  int reversep;

  reversep = 0;
1294
  if ((if_info->branch_cost >= 2
1295
       || STORE_FLAG_VALUE == -1)
Richard Henderson committed
1296 1297
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
1298 1299 1300
	  || ((reversep = (reversed_comparison_code (if_info->cond,
						     if_info->jump)
			   != UNKNOWN))
Richard Henderson committed
1301 1302 1303 1304 1305 1306 1307 1308
	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
1309
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
1310 1311
				      if_info->x,
				      target, if_info->x, 0,
1312
				      OPTAB_WIDEN);
Richard Henderson committed
1313 1314 1315 1316

      if (target)
	{
	  if (target != if_info->x)
1317
	    noce_emit_move_insn (if_info->x, target);
Richard Henderson committed
1318

1319 1320
	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
1321 1322
	    return FALSE;

1323
	  emit_insn_before_setloc (seq, if_info->jump,
1324
				   INSN_LOCATOR (if_info->insn_a));
Richard Henderson committed
1325 1326 1327 1328 1329 1330 1331 1332 1333 1334 1335 1336
	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}

/* Helper function for noce_try_cmove and noce_try_cmove_arith.
   Emit instructions setting X to VTRUE when the comparison
   (CODE CMP_A CMP_B) holds and to VFALSE otherwise.  Returns the rtx
   holding the result (possibly X itself), or NULL_RTX on failure.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      /* Only keep the hand-built insn if the target recognizes it.  */
      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  unsignedp = (code == LTU || code == GEU
	       || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				  vtrue, vfalse, GET_MODE (x),
				  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

     x = (reg:M TARGET)
     vtrue = (subreg:M (reg:N VTRUE) BYTE)
     vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
      unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      /* The two subregs must view their inner registers identically
	 (same inner mode, byte offset and promotion status) for the
	 inner-mode conditional move to be valid.  */
      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
	  || byte_vtrue != byte_vfalse
	  || (SUBREG_PROMOTED_VAR_P (vtrue)
	      != SUBREG_PROMOTED_VAR_P (vfalse))
	  || (SUBREG_PROMOTED_UNSIGNED_P (vtrue)
	      != SUBREG_PROMOTED_UNSIGNED_P (vfalse)))
	return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
				      VOIDmode, reg_vtrue, reg_vfalse,
				      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
	return NULL_RTX;

      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_UNSIGNED_SET (target, SUBREG_PROMOTED_UNSIGNED_P (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}

/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */

static int
1447
noce_try_cmove (struct noce_if_info *if_info)
Richard Henderson committed
1448 1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465
{
  enum rtx_code code;
  rtx target, seq;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
				XEXP (if_info->cond, 0),
				XEXP (if_info->cond, 1),
				if_info->a, if_info->b);

      if (target)
	{
	  if (target != if_info->x)
1466
	    noce_emit_move_insn (if_info->x, target);
Richard Henderson committed
1467

1468 1469 1470 1471
	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

1472
	  emit_insn_before_setloc (seq, if_info->jump,
1473
				   INSN_LOCATOR (if_info->insn_a));
Richard Henderson committed
1474 1475 1476 1477 1478 1479 1480 1481 1482 1483 1484 1485 1486 1487 1488
	  return TRUE;
	}
      else
	{
	  end_sequence ();
	  return FALSE;
	}
    }

  return FALSE;
}

/* Try more complex cases involving conditional_move.
   Both arms are computed unconditionally (into registers if needed)
   and a conditional move selects the result, provided the combined
   insn cost stays below the branch cost and neither arm can trap.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      enum machine_mode address_mode
	= targetm.addr_space.address_mode (MEM_ADDR_SPACE (a));

      /* Switch to conditionally selecting the addresses; the load
	 itself is emitted after the cmove succeeds.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost
	= insn_rtx_cost (PATTERN (insn_a),
      			 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
	return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost
	+= insn_rtx_cost (PATTERN (insn_b),
      			  optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  /* Swap the arms (and their defining insns) and reverse the
	     condition to match.  */
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  tmp = a, a = b, b = tmp;
	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
	}
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (is_mem)
	{
	  tmp = gen_reg_rtx (GET_MODE (a));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
	}
      else if (! insn_a)
	goto end_seq_and_fail;
      else
	{
	  a = gen_reg_rtx (GET_MODE (a));
	  tmp = copy_rtx (insn_a);
	  set = single_set (tmp);
	  SET_DEST (set) = a;
	  tmp = emit_insn (PATTERN (tmp));
	}
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set, last;

      if (is_mem)
	{
          tmp = gen_reg_rtx (GET_MODE (b));
	  tmp = gen_rtx_SET (VOIDmode, tmp, b);
	}
      else if (! insn_b)
	goto end_seq_and_fail;
      else
	{
          b = gen_reg_rtx (GET_MODE (b));
	  tmp = copy_rtx (insn_b);
	  set = single_set (tmp);
	  SET_DEST (set) = b;
	  tmp = PATTERN (tmp);
	}

      /* If insn to set up A clobbers any registers B depends on, try to
	 swap insn that sets up A with the one that sets up B.  If even
	 that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
	{
	  tmp = emit_insn_before (tmp, get_insns ());
	  if (modified_in_p (orig_a, tmp))
	    goto end_seq_and_fail;
	}
      else
	tmp = emit_insn (tmp);

      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
	MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
	MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
      set_mem_align (tmp,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (tmp, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  tmp = end_ifcvt_sequence (if_info);
  if (!tmp)
    return FALSE;

  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}

1687 1688 1689 1690 1691
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.

   TARGET is the operand (A, B, or a constant) that must appear in the
   returned condition; *EARLIEST is set to the earliest insn at which
   the returned condition is valid.  Returns NULL on failure.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
  /* If the THEN and ELSE arms were exchanged, the branch sense flips
     with them.  */
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
	  && BLOCK_FOR_INSN (prev_insn)
	     == BLOCK_FOR_INSN (if_info->cond_earliest)
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (CONST_INT_P (src))
	    {
	      /* Substitute the constant for whichever comparison
		 operand the previous SET defined.  */
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      /* Canonicalize: keep the constant in the second operand.  */
	      if (CONST_INT_P (op_a))
		{
		  rtx tmp = op_a;
		  op_a = op_b;
		  op_b = tmp;
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  */
      if (CONST_INT_P (op_b))
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, false, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}

/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */

static int
1843 1844
noce_try_minmax (struct noce_if_info *if_info)
{
1845
  rtx cond, earliest, target, seq;
1846
  enum rtx_code code, op;
1847 1848
  int unsignedp;

1849 1850
  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
1851
     to get the target to tell us...  */
1852 1853
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
      || HONOR_NANS (GET_MODE (if_info->x)))
1854 1855 1856 1857 1858 1859 1860 1861 1862 1863 1864 1865 1866 1867 1868 1869 1870 1871 1872 1873 1874 1875 1876 1877 1878 1879 1880 1881 1882 1883 1884
    return FALSE;

  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
	return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
	return FALSE;
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
1885
      op = SMAX;
1886 1887 1888 1889 1890 1891
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
1892
      op = SMIN;
1893 1894 1895 1896
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
1897
      op = UMAX;
1898 1899 1900 1901
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
1902
      op = UMIN;
1903 1904 1905 1906 1907 1908 1909 1910
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

1911 1912 1913
  target = expand_simple_binop (GET_MODE (if_info->x), op,
				if_info->a, if_info->b,
				if_info->x, unsignedp, OPTAB_WIDEN);
1914 1915 1916 1917 1918 1919
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
1920
    noce_emit_move_insn (if_info->x, target);
1921

1922 1923
  seq = end_ifcvt_sequence (if_info);
  if (!seq)
1924 1925
    return FALSE;

1926
  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1927 1928 1929 1930 1931 1932
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}

1933 1934 1935
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
   "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
   etc.  */
1936 1937

static int
1938 1939
noce_try_abs (struct noce_if_info *if_info)
{
1940 1941
  rtx cond, earliest, target, seq, a, b, c;
  int negate;
1942
  bool one_cmpl = false;
1943

1944 1945 1946 1947
  /* Reject modes with signed zeros.  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

1948 1949 1950
  /* Recognize A and B as constituting an ABS or NABS.  The canonical
     form is a branch around the negation, taken when the object is the
     first operand of a comparison against 0 that evaluates to true.  */
1951 1952 1953 1954 1955 1956 1957 1958 1959
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
    }
1960 1961 1962 1963 1964 1965 1966 1967 1968 1969 1970
  else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
    {
      negate = 0;
      one_cmpl = true;
    }
  else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
      one_cmpl = true;
    }
1971 1972
  else
    return FALSE;
1973

1974 1975 1976 1977 1978 1979 1980 1981
  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
1982 1983 1984 1985
    {
      c = XEXP (cond, 0);
      negate = !negate;
    }
1986 1987 1988
  else
    return FALSE;

1989 1990
  /* Verify that C is zero.  Search one step backward for a
     REG_EQUAL note or a simple source if necessary.  */
1991 1992
  if (REG_P (c))
    {
1993 1994
      rtx set, insn = prev_nonnote_insn (earliest);
      if (insn
1995
	  && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
1996 1997 1998 1999 2000 2001 2002 2003 2004 2005
	  && (set = single_set (insn))
	  && rtx_equal_p (SET_DEST (set), c))
	{
	  rtx note = find_reg_equal_equiv_note (insn);
	  if (note)
	    c = XEXP (note, 0);
	  else
	    c = SET_SRC (set);
	}
      else
2006 2007
	return FALSE;
    }
2008
  if (MEM_P (c)
2009 2010 2011 2012 2013
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
2014
     Note that these rtx constants are known to be CONST_INT, and
2015 2016 2017 2018 2019 2020 2021 2022 2023 2024 2025 2026 2027 2028 2029 2030 2031 2032 2033 2034 2035 2036 2037 2038 2039 2040 2041
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();
2042 2043 2044 2045 2046
  if (one_cmpl)
    target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
                                         if_info->x);
  else
    target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
2047

2048
  /* ??? It's a quandary whether cmove would be better here, especially
2049 2050
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
2051 2052 2053 2054 2055 2056 2057 2058
    {
      if (one_cmpl)
        target = expand_simple_unop (GET_MODE (target), NOT, target,
                                     if_info->x, 0);
      else
        target = expand_simple_unop (GET_MODE (target), NEG, target,
                                     if_info->x, 0);
    }
2059 2060 2061 2062 2063 2064 2065 2066

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
2067
    noce_emit_move_insn (if_info->x, target);
2068

2069 2070
  seq = end_ifcvt_sequence (if_info);
  if (!seq)
2071 2072
    return FALSE;

2073
  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
2074 2075 2076 2077 2078 2079
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}

2080 2081 2082 2083 2084 2085 2086 2087
/* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;".  */

static int
noce_try_sign_mask (struct noce_if_info *if_info)
{
  rtx cond, t, m, c, seq;
  enum machine_mode mode;
  enum rtx_code code;
2088
  bool t_unconditional;
2089 2090 2091 2092 2093 2094 2095 2096 2097 2098 2099 2100 2101 2102 2103 2104 2105 2106 2107 2108 2109 2110 2111 2112 2113 2114 2115 2116

  cond = if_info->cond;
  code = GET_CODE (cond);
  m = XEXP (cond, 0);
  c = XEXP (cond, 1);

  t = NULL_RTX;
  if (if_info->a == const0_rtx)
    {
      if ((code == LT && c == const0_rtx)
	  || (code == LE && c == constm1_rtx))
	t = if_info->b;
    }
  else if (if_info->b == const0_rtx)
    {
      if ((code == GE && c == const0_rtx)
	  || (code == GT && c == constm1_rtx))
	t = if_info->a;
    }

  if (! t || side_effects_p (t))
    return FALSE;

  /* We currently don't handle different modes.  */
  mode = GET_MODE (t);
  if (GET_MODE (m) != mode)
    return FALSE;

2117 2118 2119 2120 2121 2122 2123 2124 2125 2126 2127 2128 2129
  /* This is only profitable if T is unconditionally executed/evaluated in the
     original insn sequence or T is cheap.  The former happens if B is the
     non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
     INSN_B which can happen for e.g. conditional stores to memory.  For the
     cost computation use the block TEST_BB where the evaluation will end up
     after the transformation.  */
  t_unconditional =
    (t == if_info->b
     && (if_info->insn_b == NULL_RTX
	 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
  if (!(t_unconditional
	|| (rtx_cost (t, SET, optimize_bb_for_speed_p (if_info->test_bb))
	    < COSTS_N_INSNS (2))))
2130 2131 2132
    return FALSE;

  start_sequence ();
2133 2134 2135 2136
  /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
     "(signed) m >> 31" directly.  This benefits targets with specialized
     insns to obtain the signmask, but still uses ashr_optab otherwise.  */
  m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
2137 2138 2139 2140 2141 2142 2143 2144 2145 2146
  t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
	: NULL_RTX;

  if (!t)
    {
      end_sequence ();
      return FALSE;
    }

  noce_emit_move_insn (if_info->x, t);
2147 2148 2149 2150 2151 2152

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
2153 2154 2155 2156
  return TRUE;
}


2157 2158 2159 2160 2161 2162 2163 2164 2165 2166 2167 2168 2169 2170 2171 2172 2173 2174 2175 2176 2177 2178 2179 2180 2181 2182 2183 2184 2185 2186
/* Optimize away "if (x & C) x |= C" and similar bit manipulation
   transformations.  */

static int
noce_try_bitop (struct noce_if_info *if_info)
{
  rtx cond, x, a, result, seq;
  enum machine_mode mode;
  enum rtx_code code;
  int bitnum;

  x = if_info->x;
  cond = if_info->cond;
  code = GET_CODE (cond);

  /* Check for no else condition.  */
  if (! rtx_equal_p (x, if_info->b))
    return FALSE;

  /* Check for a suitable condition.  */
  if (code != NE && code != EQ)
    return FALSE;
  if (XEXP (cond, 1) != const0_rtx)
    return FALSE;
  cond = XEXP (cond, 0);

  /* ??? We could also handle AND here.  */
  if (GET_CODE (cond) == ZERO_EXTRACT)
    {
      if (XEXP (cond, 1) != const1_rtx
Shujing Zhao committed
2187
	  || !CONST_INT_P (XEXP (cond, 2))
2188 2189 2190 2191
	  || ! rtx_equal_p (x, XEXP (cond, 0)))
	return FALSE;
      bitnum = INTVAL (XEXP (cond, 2));
      mode = GET_MODE (x);
2192 2193 2194
      if (BITS_BIG_ENDIAN)
	bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
      if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
2195 2196 2197 2198 2199 2200 2201 2202 2203 2204
	return FALSE;
    }
  else
    return FALSE;

  a = if_info->a;
  if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
    {
      /* Check for "if (X & C) x = x op C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
Shujing Zhao committed
2205
          || !CONST_INT_P (XEXP (a, 1))
2206 2207 2208 2209 2210 2211 2212 2213 2214 2215 2216 2217 2218 2219 2220 2221 2222 2223 2224 2225 2226 2227 2228 2229 2230
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != (unsigned HOST_WIDE_INT) 1 << bitnum)
        return FALSE;

      /* if ((x & C) == 0) x |= C; is transformed to x |= C.   */
      /* if ((x & C) != 0) x |= C; is transformed to nothing.  */
      if (GET_CODE (a) == IOR)
	result = (code == NE) ? a : NULL_RTX;
      else if (code == NE)
	{
	  /* if ((x & C) == 0) x ^= C; is transformed to x |= C.   */
	  result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
	  result = simplify_gen_binary (IOR, mode, x, result);
	}
      else
	{
	  /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C.  */
	  result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
	  result = simplify_gen_binary (AND, mode, x, result);
	}
    }
  else if (GET_CODE (a) == AND)
    {
      /* Check for "if (X & C) x &= ~C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
Shujing Zhao committed
2231
	  || !CONST_INT_P (XEXP (a, 1))
2232 2233 2234 2235 2236 2237 2238 2239 2240 2241 2242 2243 2244 2245 2246 2247 2248 2249 2250 2251 2252 2253 2254 2255 2256 2257
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
        return FALSE;

      /* if ((x & C) == 0) x &= ~C; is transformed to nothing.  */
      /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C.  */
      result = (code == EQ) ? a : NULL_RTX;
    }
  else
    return FALSE;

  if (result)
    {
      start_sequence ();
      noce_emit_move_insn (x, result);
      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
    }
  return TRUE;
}


2258
/* Similar to get_condition, only the resulting condition must be
2259 2260
   valid at JUMP, instead of at EARLIEST.

2261 2262
   If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
   THEN block of the caller, and we have to reverse the condition.  */
Richard Henderson committed
2263 2264

static rtx
2265
noce_get_condition (rtx jump, rtx *earliest, bool then_else_reversed)
Richard Henderson committed
2266
{
2267
  rtx cond, set, tmp;
2268
  bool reverse;
Richard Henderson committed
2269

2270
  if (! any_condjump_p (jump))
Richard Henderson committed
2271 2272
    return NULL_RTX;

2273 2274
  set = pc_set (jump);

2275 2276 2277 2278 2279
  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

2280 2281 2282
  /* We may have to reverse because the caller's if block is not canonical,
     i.e. the THEN block isn't the fallthrough block for the TEST block
     (see find_if_header).  */
2283 2284 2285
  if (then_else_reversed)
    reverse = !reverse;

2286 2287
  /* If the condition variable is a register and is MODE_INT, accept it.  */

2288
  cond = XEXP (SET_SRC (set), 0);
2289 2290
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
Richard Henderson committed
2291 2292 2293
    {
      *earliest = jump;

2294
      if (reverse)
Richard Henderson committed
2295
	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2296 2297
			       GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
Richard Henderson committed
2298 2299
    }

2300 2301
  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */
2302 2303 2304 2305 2306 2307 2308 2309 2310
  tmp = canonicalize_condition (jump, cond, reverse, earliest,
				NULL_RTX, false, true);

  /* We don't handle side-effects in the condition, like handling
     REG_INC notes and making sure no duplicate conditions are emitted.  */
  if (tmp != NULL_RTX && side_effects_p (tmp))
    return NULL_RTX;

  return tmp;
Richard Henderson committed
2311 2312
}

2313 2314 2315
/* Return true if OP is ok for if-then-else processing.  */

static int
2316
noce_operand_ok (const_rtx op)
2317 2318 2319
{
  /* We special-case memories, so handle any of them with
     no address side effects.  */
2320
  if (MEM_P (op))
2321 2322 2323 2324 2325 2326 2327 2328
    return ! side_effects_p (XEXP (op, 0));

  if (side_effects_p (op))
    return FALSE;

  return ! may_trap_p (op);
}

2329 2330 2331
/* Return true if a write into MEM may trap or fault.  */

static bool
2332
noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2333 2334 2335 2336 2337 2338 2339 2340 2341 2342 2343 2344 2345 2346 2347 2348 2349 2350 2351 2352 2353 2354 2355 2356 2357 2358 2359 2360 2361 2362
{
  rtx addr;

  if (MEM_READONLY_P (mem))
    return true;

  if (may_trap_or_fault_p (mem))
    return true;

  addr = XEXP (mem, 0);

  /* Call target hook to avoid the effects of -fpic etc....  */
  addr = targetm.delegitimize_address (addr);

  while (addr)
    switch (GET_CODE (addr))
      {
      case CONST:
      case PRE_DEC:
      case PRE_INC:
      case POST_DEC:
      case POST_INC:
      case POST_MODIFY:
	addr = XEXP (addr, 0);
	break;
      case LO_SUM:
      case PRE_MODIFY:
	addr = XEXP (addr, 1);
	break;
      case PLUS:
Shujing Zhao committed
2363
	if (CONST_INT_P (XEXP (addr, 1)))
2364 2365 2366 2367 2368 2369 2370 2371 2372 2373 2374 2375 2376 2377 2378 2379 2380 2381
	  addr = XEXP (addr, 0);
	else
	  return false;
	break;
      case LABEL_REF:
	return true;
      case SYMBOL_REF:
	if (SYMBOL_REF_DECL (addr)
	    && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
	  return true;
	return false;
      default:
	return false;
      }

  return false;
}

2382 2383 2384 2385 2386 2387 2388 2389 2390 2391 2392 2393 2394 2395 2396 2397 2398 2399 2400 2401 2402 2403 2404 2405
/* Return whether we can use store speculation for MEM.  TOP_BB is the
   basic block above the conditional block where we are considering
   doing the speculative store.  We look for whether MEM is set
   unconditionally later in the function.  */

static bool
noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
{
  basic_block dominator;

  /* Scan the chain of post-dominators of TOP_BB: every block on that
     chain executes whenever TOP_BB does, so a store to MEM found there
     is an unconditional store.  */
  for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
       dominator != NULL;
       dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
    {
      rtx insn;

      FOR_BB_INSNS (dominator, insn)
	{
	  /* If we see something that might be a memory barrier, we
	     have to stop looking.  Even if the MEM is set later in
	     the function, we still don't want to set it
	     unconditionally before the barrier.  */
	  if (INSN_P (insn)
	      && (volatile_insn_p (PATTERN (insn))
		  || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
	    return false;

	  /* An unconditional store to MEM: speculation is safe.  */
	  if (memory_modified_in_insn_p (mem, insn))
	    return true;
	  /* If the address itself changes, later stores no longer
	     refer to the same location; give up.  */
	  if (modified_in_p (XEXP (mem, 0), insn))
	    return false;

	}
    }

  return false;
}

2420 2421 2422
/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
   it without using conditional execution.  Return TRUE if we were successful
   at converting the block.  */

static int
noce_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;	/* test block */
  basic_block then_bb = if_info->then_bb;	/* THEN */
  basic_block else_bb = if_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = if_info->join_bb;	/* JOIN */
  rtx jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  /* Look for one of the potential sets.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      /* The ELSE block must also be a single simple set of X.  */
      insn_b = first_active_insn (else_bb);
      if (! insn_b
	  || insn_b != last_active_insn (else_bb, FALSE)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      /* No ELSE block: look for pattern (2), "x = b;" just before the
	 condition was computed.  */
      insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
      /* We're going to be moving the evaluation of B down from above
	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
	 intact.  */
      if (! insn_b
	  || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
	  || !NONJUMP_INSN_P (insn_b)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b))
	  || ! noce_operand_ok (SET_SRC (set_b))
	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
	  || modified_between_p (SET_SRC (set_b), insn_b, jump)
	  /* Likewise with X.  In particular this can happen when
	     noce_get_condition looks farther back in the instruction
	     stream than one might expect.  */
	  || reg_overlap_mentioned_p (x, cond)
	  || reg_overlap_mentioned_p (x, a)
	  || modified_between_p (x, insn_b, jump))
	insn_b = set_b = NULL_RTX;
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at then end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (!REG_P (x)
      || (HARD_REGISTER_P (x)
	  && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
    {
      if (GET_MODE (x) == BLKmode)
	return FALSE;

      if (GET_CODE (x) == ZERO_EXTRACT
	  && (!CONST_INT_P (XEXP (x, 1))
	      || !CONST_INT_P (XEXP (x, 2))))
	return FALSE;

      /* Compute into a fresh pseudo; the success path below copies it
	 back into ORIG_X.  */
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

 retry:
  /* Set up the info block for our subroutines.  */
  if_info->insn_a = insn_a;
  if_info->insn_b = insn_b;
  if_info->x = x;
  if_info->a = a;
  if_info->b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  if (else_bb && insn_b == BB_END (else_bb))
	    BB_END (else_bb) = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL_RTX;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  if (!set_b && MEM_P (orig_x))
    {
      /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
	 for optimizations if writing to x may trap or fault,
	 i.e. it's a memory other than a static var or a stack slot,
	 is misaligned on strict aligned machines or is read-only.  If
	 x is a read-only memory, then the program is valid only if we
	 avoid the store into it.  If there are stores on both the
	 THEN and ELSE arms, then we can go ahead with the conversion;
	 either the program is broken, or the condition is always
	 false such that the other memory is selected.  */
      if (noce_mem_write_may_trap_or_fault_p (orig_x))
	return FALSE;

      /* Avoid store speculation: given "if (...) x = a" where x is a
	 MEM, we only want to do the store if x is always set
	 somewhere in the function.  This avoids cases like
	   if (pthread_mutex_trylock(mutex))
	     ++global_variable;
	 where we only want global_variable to be changed if the mutex
	 is held.  FIXME: This should ideally be expressed directly in
	 RTL somehow.  */
      if (!noce_can_store_speculate_p (test_bb, orig_x))
	return FALSE;
    }

  if (noce_try_move (if_info))
    goto success;
  if (noce_try_store_flag (if_info))
    goto success;
  if (noce_try_bitop (if_info))
    goto success;
  if (noce_try_minmax (if_info))
    goto success;
  if (noce_try_abs (if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (if_info))
    goto success;
  if (! targetm.have_conditional_execution ())
    {
      if (noce_try_store_flag_constants (if_info))
	goto success;
      if (noce_try_addcc (if_info))
	goto success;
      if (noce_try_store_flag_mask (if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (if_info))
	goto success;
      if (noce_try_sign_mask (if_info))
	goto success;
    }

  /* Everything failed with B taken from INSN_B; retry once treating the
     implicit else arm as "x = x" (pattern (3)).  */
  if (!else_bb && set_b)
    {
      insn_b = set_b = NULL_RTX;
      b = orig_x;
      goto retry;
    }

  return FALSE;

 success:

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      rtx seq;

      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      seq = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (seq);
      end_sequence ();

      emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATOR (insn_a));
    }

  /* The original THEN and ELSE blocks may now be removed.  The test block
     must now jump to the join block.  If the test block and the join block
     can be merged, do so.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
2679 2680 2681 2682

/* Check whether a block is suitable for conditional move conversion.
   Every insn must be a simple set of a register to a constant or a
   register.  For each assignment, store the value in the array VALS,
   indexed by register number, then store the register number in
   REGS.  COND is the condition we will test.  */

static int
check_cond_move_block (basic_block bb, rtx *vals, VEC (int, heap) **regs,
		       rtx cond)
{
  rtx insn;

   /* We can only handle simple jumps at the end of the basic block.
      It is almost impossible to update the CFG otherwise.  */
  insn = BB_END (bb);
  if (JUMP_P (insn) && !onlyjump_p (insn))
    return FALSE;

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, dest, src;

      /* Skip notes/debug insns and the final jump.  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      set = single_set (insn);
      if (!set)
	return FALSE;

      dest = SET_DEST (set);
      src = SET_SRC (set);
      /* Only pseudo (or large-class hard) register destinations.  */
      if (!REG_P (dest)
	  || (HARD_REGISTER_P (dest)
	      && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
	return FALSE;

      if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
	return FALSE;

      if (side_effects_p (src) || side_effects_p (dest))
	return FALSE;

      if (may_trap_p (src) || may_trap_p (dest))
	return FALSE;

      /* Don't try to handle this if the source register was
	 modified earlier in the block.  */
      if ((REG_P (src)
	   && vals[REGNO (src)] != NULL)
	  || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
	      && vals[REGNO (SUBREG_REG (src))] != NULL))
	return FALSE;

      /* Don't try to handle this if the destination register was
	 modified earlier in the block.  */
      if (vals[REGNO (dest)] != NULL)
	return FALSE;

      /* Don't try to handle this if the condition uses the
	 destination register.  */
      if (reg_overlap_mentioned_p (dest, cond))
	return FALSE;

      /* Don't try to handle this if the source register is modified
	 later in the block.  */
      if (!CONSTANT_P (src)
	  && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
	return FALSE;

      /* Record the value assigned to DEST and remember DEST's number.  */
      vals[REGNO (dest)] = src;

      VEC_safe_push (int, heap, *regs, REGNO (dest));
    }

  return TRUE;
}

2756 2757 2758 2759 2760 2761 2762 2763 2764 2765 2766 2767 2768 2769 2770 2771 2772 2773 2774 2775 2776 2777 2778 2779 2780
/* Given a basic block BB suitable for conditional move conversion,
   a condition COND, and arrays THEN_VALS and ELSE_VALS containing the
   register values depending on COND, emit the insns in the block as
   conditional moves.  If ELSE_BLOCK is true, THEN_BB was already
   processed.  The caller has started a sequence for the conversion.
   Return true if successful, false if something goes wrong.  */

static bool
cond_move_convert_if_block (struct noce_if_info *if_infop,
			    basic_block bb, rtx cond,
			    rtx *then_vals, rtx *else_vals,
			    bool else_block_p)
{
  enum rtx_code code;
  rtx insn, cond_arg0, cond_arg1;

  code = GET_CODE (cond);
  cond_arg0 = XEXP (cond, 0);
  cond_arg1 = XEXP (cond, 1);

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, target, dest, t, e;
      unsigned int regno;

      /* ??? Maybe emit conditional debug insn?  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      set = single_set (insn);
      gcc_assert (set && REG_P (SET_DEST (set)));

      dest = SET_DEST (set);
      regno = REGNO (dest);

      /* T and E are the values DEST receives on the THEN and ELSE
	 arms respectively; NULL means the arm leaves DEST alone.  */
      t = then_vals[regno];
      e = else_vals[regno];

      if (else_block_p)
	{
	  /* If this register was set in the then block, we already
	     handled this case there.  */
	  if (t)
	    continue;
	  /* DEST keeps its old value on the THEN arm.  */
	  t = dest;
	  gcc_assert (e);
	}
      else
	{
	  gcc_assert (t);
	  /* DEST keeps its old value on the ELSE arm.  */
	  if (!e)
	    e = dest;
	}

      target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
				t, e);
      if (!target)
	return false;

      if (target != dest)
	noce_emit_move_insn (dest, target);
    }

  return true;
}

2821 2822
/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
   it using only conditional moves.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_move_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;
  basic_block then_bb = if_info->then_bb;
  basic_block else_bb = if_info->else_bb;
  basic_block join_bb = if_info->join_bb;
  rtx jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx seq, loc_insn;
  int max_reg, size, c, reg;
  rtx *then_vals;
  rtx *else_vals;
  VEC (int, heap) *then_regs = NULL;
  VEC (int, heap) *else_regs = NULL;
  unsigned int i;

  /* Build a mapping for each block to the value used for each
     register.  Indexed by register number; stack-allocated since
     the maps only live for the duration of this call.  */
  max_reg = max_reg_num ();
  size = (max_reg + 1) * sizeof (rtx);
  then_vals = (rtx *) alloca (size);
  else_vals = (rtx *) alloca (size);
  memset (then_vals, 0, size);
  memset (else_vals, 0, size);

  /* Make sure the blocks are suitable.  This also fills in the
     value maps and the lists of registers each block sets.  */
  if (!check_cond_move_block (then_bb, then_vals, &then_regs, cond)
      || (else_bb
	  && !check_cond_move_block (else_bb, else_vals, &else_regs, cond)))
    {
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }

  /* Make sure the blocks can be used together.  If the same register
     is set in both blocks, and is not set to a constant in both
     cases, then both blocks must set it to the same register.  We
     have already verified that if it is set to a register, that the
     source register does not change after the assignment.  Also count
     the number of registers set in only one of the blocks.  */
  c = 0;
  FOR_EACH_VEC_ELT (int, then_regs, i, reg)
    {
      if (!then_vals[reg] && !else_vals[reg])
	continue;

      if (!else_vals[reg])
	++c;
      else
	{
	  if (!CONSTANT_P (then_vals[reg])
	      && !CONSTANT_P (else_vals[reg])
	      && !rtx_equal_p (then_vals[reg], else_vals[reg]))
	    {
	      VEC_free (int, heap, then_regs);
	      VEC_free (int, heap, else_regs);
	      return FALSE;
	    }
	}
    }

  /* Finish off c for MAX_CONDITIONAL_EXECUTE.  */
  FOR_EACH_VEC_ELT (int, else_regs, i, reg)
    if (!then_vals[reg])
      ++c;

  /* Make sure it is reasonable to convert this block.  What matters
     is the number of assignments currently made in only one of the
     branches, since if we convert we are going to always execute
     them.  */
  if (c > MAX_CONDITIONAL_EXECUTE)
    {
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }

  /* Try to emit the conditional moves.  First do the then block,
     then do anything left in the else blocks.  */
  start_sequence ();
  if (!cond_move_convert_if_block (if_info, then_bb, cond,
				   then_vals, else_vals, false)
      || (else_bb
	  && !cond_move_convert_if_block (if_info, else_bb, cond,
					  then_vals, else_vals, true)))
    {
      end_sequence ();
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }
  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    {
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }

  /* Pick a source location for the emitted sequence: the first real
     insn of the THEN block, falling back to the ELSE block.  */
  loc_insn = first_active_insn (then_bb);
  if (!loc_insn)
    {
      loc_insn = first_active_insn (else_bb);
      gcc_assert (loc_insn);
    }
  emit_insn_before_setloc (seq, jump, INSN_LOCATOR (loc_insn));

  /* The conversion succeeded; rewrite the CFG.  The THEN (and ELSE)
     blocks are now dead, and TEST falls straight through to JOIN.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  /* Merge the blocks and update dataflow.  */
  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;

  VEC_free (int, heap, then_regs);
  VEC_free (int, heap, else_regs);
  return TRUE;
}
2959

Richard Henderson committed
2960

2961 2962 2963 2964 2965 2966 2967
/* Determine if a given basic block heads a simple IF-THEN-JOIN or an
   IF-THEN-ELSE-JOIN block.

   If so, we'll try to convert the insns to not require the branch,
   using only transformations that do not require conditional execution.

   Return TRUE if we were successful at converting the block.  */

static int
noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
		    int pass)
{
  basic_block then_bb, else_bb, join_bb;
  bool then_else_reversed = false;
  rtx jump, cond;
  rtx cond_earliest;
  struct noce_if_info if_info;

  /* We only ever should get here before reload.  */
  gcc_assert (!reload_completed);

  /* Recognize an IF-THEN-ELSE-JOIN block.  */
  if (single_pred_p (then_edge->dest)
      && single_succ_p (then_edge->dest)
      && single_pred_p (else_edge->dest)
      && single_succ_p (else_edge->dest)
      && single_succ (then_edge->dest) == single_succ (else_edge->dest))
    {
      then_bb = then_edge->dest;
      else_bb = else_edge->dest;
      join_bb = single_succ (then_bb);
    }
  /* Recognize an IF-THEN-JOIN block.  */
  else if (single_pred_p (then_edge->dest)
	   && single_succ_p (then_edge->dest)
	   && single_succ (then_edge->dest) == else_edge->dest)
    {
      then_bb = then_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = else_edge->dest;
    }
  /* Recognize an IF-ELSE-JOIN block.  We can have those because the order
     of basic blocks in cfglayout mode does not matter, so the fallthrough
     edge can go to any basic block (and not just to bb->next_bb, like in
     cfgrtl mode).  */
  else if (single_pred_p (else_edge->dest)
	   && single_succ_p (else_edge->dest)
	   && single_succ (else_edge->dest) == then_edge->dest)
    {
      /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
	 To make this work, we have to invert the THEN and ELSE blocks
	 and reverse the jump condition.  */
      then_bb = else_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = single_succ (then_bb);
      then_else_reversed = true;
    }
  else
    /* Not a form we can handle.  */
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
    return FALSE;
  if (else_bb
      && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
	       (else_bb) ? "-ELSE" : "",
	       pass, test_bb->index, then_bb->index);

      if (else_bb)
	fprintf (dump_file, ", else %d", else_bb->index);

      fprintf (dump_file, ", join %d\n", join_bb->index);
    }

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  jump = BB_END (test_bb);
  if (! onlyjump_p (jump))
    return FALSE;

  /* If this is not a standard conditional jump, we can't parse it.  */
  cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
  if (!cond)
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Initialize an IF_INFO struct to pass around.  */
  memset (&if_info, 0, sizeof if_info);
  if_info.test_bb = test_bb;
  if_info.then_bb = then_bb;
  if_info.else_bb = else_bb;
  if_info.join_bb = join_bb;
  if_info.cond = cond;
  if_info.cond_earliest = cond_earliest;
  if_info.jump = jump;
  if_info.then_else_reversed = then_else_reversed;
  if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
				     predictable_edge_p (then_edge));

  /* Do the real work.  First try the straight-line noce transforms,
     then fall back to conditional moves if the target has them.  */

  if (noce_process_if_block (&if_info))
    return TRUE;

  if (HAVE_conditional_move
      && cond_move_process_if_block (&if_info))
    return TRUE;

  return FALSE;
}
3083

Richard Henderson committed
3084 3085 3086 3087

/* Merge the blocks and mark for local life update.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;	/* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;
  df_set_bb_dirty (test_bb);

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      do
	{
	  bb = fallthru;
	  fallthru = block_fallthru (bb);
	  merge_blocks (combo_bb, bb);
	  num_true_changes++;
	}
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (EDGE_COUNT (combo_bb->succs) == 0)
	/* No successors is only legitimate for a noreturn call or an
	   unconditional trap at the end of the combined block.  */
	gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
		    || (NONJUMP_INSN_P (last)
			&& GET_CODE (PATTERN (last)) == TRAP_IF
			&& (TRAP_CONDITION (PATTERN (last))
			    == const_true_rtx)));

      else
      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
	gcc_assert (JUMP_P (last)
		    || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
			&& CALL_P (last)
			&& SIBLING_CALL_P (last))
		    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
			&& can_throw_internal (last)));
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to a non-return function).  */
  else if (EDGE_COUNT (join_bb->preds) < 2
	   && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN cleanly and update the dataflow try
	 again on this pass.*/
      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      gcc_assert (single_succ_p (combo_bb)
		  && single_succ (combo_bb) == join_bb);

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
	tidy_fallthru_edge (single_succ_edge (combo_bb));
    }

  num_updated_if_blocks++;
}

3196 3197 3198 3199
/* Find a block ending in a simple IF condition and try to transform it
   in some way.  When converting a multi-block condition, put the new code
   in the first such block and delete the rest.  Return a pointer to this
   first block if some transformation was done.  Return NULL otherwise.  */

static basic_block
find_if_header (basic_block test_bb, int pass)
{
  ce_if_block_t ce_info;
  edge then_edge;
  edge else_edge;

  /* The kind of block we're looking for has exactly two successors.  */
  if (EDGE_COUNT (test_bb->succs) != 2)
    return NULL;

  then_edge = EDGE_SUCC (test_bb, 0);
  else_edge = EDGE_SUCC (test_bb, 1);

  /* Blocks already modified on this pass must be re-analyzed first.  */
  if (df_get_bb_dirty (then_edge->dest))
    return NULL;
  if (df_get_bb_dirty (else_edge->dest))
    return NULL;

  /* Neither edge should be abnormal.  */
  if ((then_edge->flags & EDGE_COMPLEX)
      || (else_edge->flags & EDGE_COMPLEX))
    return NULL;

  /* Nor exit the loop.  */
  if ((then_edge->flags & EDGE_LOOP_EXIT)
      || (else_edge->flags & EDGE_LOOP_EXIT))
    return NULL;

  /* The THEN edge is canonically the one that falls through.  */
  if (then_edge->flags & EDGE_FALLTHRU)
    ;
  else if (else_edge->flags & EDGE_FALLTHRU)
    {
      edge e = else_edge;
      else_edge = then_edge;
      then_edge = e;
    }
  else
    /* Otherwise this must be a multiway branch of some sort.  */
    return NULL;

  memset (&ce_info, 0, sizeof (ce_info));
  ce_info.test_bb = test_bb;
  ce_info.then_bb = then_edge->dest;
  ce_info.else_bb = else_edge->dest;
  ce_info.pass = pass;

#ifdef IFCVT_INIT_EXTRA_FIELDS
  IFCVT_INIT_EXTRA_FIELDS (&ce_info);
#endif

  /* Try each conversion strategy in turn: noce transforms before
     reload, conditional execution after reload, conditional traps,
     and finally the dead-block cases 1 and 2.  */
  if (!reload_completed
      && noce_find_if_block (test_bb, then_edge, else_edge, pass))
    goto success;

  if (reload_completed
      && targetm.have_conditional_execution ()
      && cond_exec_find_if_block (&ce_info))
    goto success;

  if (HAVE_trap
      && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
      && find_cond_trap (test_bb, then_edge, else_edge))
    goto success;

  if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
      && (reload_completed || !targetm.have_conditional_execution ()))
    {
      if (find_if_case_1 (test_bb, then_edge, else_edge))
	goto success;
      if (find_if_case_2 (test_bb, then_edge, else_edge))
	goto success;
    }

  return NULL;

 success:
  if (dump_file)
    fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
  /* Set this so we continue looking.  */
  cond_exec_changed_p = TRUE;
  return ce_info.test_bb;
}

/* Return true if a block has two edges, one of which falls through to the next
   block, and the other jumps to a specific block, so that we can tell if the
   block is part of an && test or an || test.  Returns either -1 or the number
   of non-note, non-jump, non-USE/CLOBBER insns in the block.  */

static int
block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
{
  edge cur_edge;
  int fallthru_p = FALSE;
  int jump_p = FALSE;
  rtx insn;
  rtx end;
  int n_insns = 0;
  edge_iterator ei;

  if (!cur_bb || !target_bb)
    return -1;

  /* If no edges, obviously it doesn't jump or fallthru.  */
  /* NOTE(review): this returns FALSE (0), which callers cannot
     distinguish from a valid zero insn count — presumably harmless
     here since such a block has no jump+fallthru shape anyway.  */
  if (EDGE_COUNT (cur_bb->succs) == 0)
    return FALSE;

  FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	/* Anything complex isn't what we want.  */
	return -1;

      else if (cur_edge->flags & EDGE_FALLTHRU)
	fallthru_p = TRUE;

      else if (cur_edge->dest == target_bb)
	jump_p = TRUE;

      else
	return -1;
    }

  /* Both a fallthru edge and a jump to TARGET_BB are required.  */
  if ((jump_p & fallthru_p) == 0)
    return -1;

  /* Don't allow calls in the block, since this is used to group && and ||
     together for conditional execution support.  ??? we should support
     conditional execution support across calls for IA-64 some day, but
     for now it makes the code simpler.  */
  end = BB_END (cur_bb);
  insn = BB_HEAD (cur_bb);

  while (insn != NULL_RTX)
    {
      if (CALL_P (insn))
	return -1;

      /* Count only "real" insns: skip notes, jumps, debug insns and
	 USE/CLOBBER markers.  */
      if (INSN_P (insn)
	  && !JUMP_P (insn)
	  && !DEBUG_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) != USE
	  && GET_CODE (PATTERN (insn)) != CLOBBER)
	n_insns++;

      if (insn == end)
	break;

      insn = NEXT_INSN (insn);
    }

  return n_insns;
}

/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.  */

static int
cond_exec_find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge cur_edge;
  basic_block next;
  edge_iterator ei;

  ce_info->last_test_bb = test_bb;

  /* We only ever should get here after reload,
     and if we have conditional execution.  */
  gcc_assert (reload_completed && targetm.have_conditional_execution ());

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (single_pred_p (test_bb)
      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
    {
      basic_block bb = single_pred (test_bb);
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
	{
	  ce_info->and_and_p = TRUE;
	  target_bb = else_bb;
	}
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
	{
	  ce_info->and_and_p = FALSE;
	  target_bb = then_bb;
	}
      else
	target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
	{
	  int total_insns = 0;
	  int blocks = 0;

	  ce_info->last_test_bb = test_bb;

	  /* Found at least one && or || block, look for more.  Walk
	     up the chain of fallthru predecessors that all jump to
	     the same target, within the insn-count budget.  */
	  do
	    {
	      ce_info->test_bb = test_bb = bb;
	      total_insns += n_insns;
	      blocks++;

	      if (!single_pred_p (bb))
		break;

	      bb = single_pred (bb);
	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
	    }
	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

	  ce_info->num_multiple_test_blocks = blocks;
	  ce_info->num_multiple_test_insns = total_insns;

	  if (ce_info->and_and_p)
	    ce_info->num_and_and_blocks = blocks;
	  else
	    ce_info->num_or_or_blocks = blocks;
	}
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (EDGE_COUNT (then_bb->succs) > 0
      && (!single_succ_p (then_bb)
          || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
	  || (epilogue_completed
	      && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (EDGE_COUNT (then_bb->succs) == 0)
    {
      if (single_pred_p (else_bb))
	{
	  rtx last_insn = BB_END (then_bb);

	  /* Skip trailing notes to find the last real insn.  */
	  while (last_insn
		 && NOTE_P (last_insn)
		 && last_insn != BB_HEAD (then_bb))
	    last_insn = PREV_INSN (last_insn);

	  if (last_insn
	      && JUMP_P (last_insn)
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (single_succ (then_bb) == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (single_succ_p (else_bb)
	   && single_succ (then_bb) == single_succ (else_bb)
	   && single_pred_p (else_bb)
	   && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
	   && !(epilogue_completed
		&& tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = single_succ (else_bb);

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s block found, pass %d, start block %d "
	       "[insn %d], then %d [%d]",
	       (else_bb) ? "-ELSE" : "",
	       ce_info->pass,
	       test_bb->index,
	       BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
	       then_bb->index,
	       BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
	fprintf (dump_file, ", else %d [%d]",
		 else_bb->index,
		 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
	       join_bb->index,
	       BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
	fprintf (dump_file, ", %d %s block%s last test %d [%d]",
		 ce_info->num_multiple_test_blocks,
		 (ce_info->and_and_p) ? "&&" : "||",
		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
		 ce_info->last_test_bb->index,
		 ((BB_HEAD (ce_info->last_test_bb))
		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
		  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than backing out the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */

  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  /* If we have && and || tests, try to first handle combining the && and ||
     tests into the conditional code, and if that fails, go back and handle
     it without the && and ||, which at present handles the && case if there
     was no ELSE block.  */
  if (cond_exec_process_if_block (ce_info, TRUE))
    return TRUE;

  if (ce_info->num_multiple_test_blocks)
    {
      cancel_changes (0);

      if (cond_exec_process_if_block (ce_info, FALSE))
	return TRUE;
    }

  return FALSE;
}

3590 3591
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.  */

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest, false);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  The condition from the
     jump describes the branch-taken case; if the trap block is the
     taken (THEN) target, trap on the condition as-is reversed.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
		       copy_rtx (XEXP (cond, 1)),
		       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  df_set_bb_dirty (else_bb);

  if (EDGE_COUNT (trap_bb->preds) == 0)
    {
      delete_basic_block (trap_bb);
      num_true_changes++;
    }

  /* Wire together the blocks again.  In cfglayout mode a fallthru edge
     suffices; in cfgrtl mode replace the conditional jump with an
     unconditional one to the surviving target.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
  else
    {
      rtx lab, newjump;

      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);
    }
  delete_insn (jump);

  if (can_merge_blocks_p (test_bb, other_bb))
    {
      merge_blocks (test_bb, other_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}

3689
/* Subroutine of find_cond_trap: if BB contains only a trap insn,
   return it.  */

static rtx
block_has_only_trap (basic_block bb)
{
  rtx trap;

  /* We're not the exit block.  */
  if (bb == EXIT_BLOCK_PTR)
    return NULL_RTX;

  /* The block must have no successors.  */
  if (EDGE_COUNT (bb->succs) > 0)
    return NULL_RTX;

  /* The only instruction in the THEN block must be the trap.  */
  trap = first_active_insn (bb);
  if (! (trap == BB_END (bb)
	 && GET_CODE (PATTERN (trap)) == TRAP_IF
         && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
    return NULL_RTX;

  return trap;
}

Richard Henderson committed
3715 3716 3717 3718
/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
   transformable, but not necessarily the other.  There need be no
   JOIN block.

3719
   Return TRUE if we were successful at converting the block.
Richard Henderson committed
3720 3721 3722 3723 3724 3725 3726 3727 3728 3729 3730 3731 3732 3733 3734 3735 3736 3737 3738 3739 3740 3741 3742 3743 3744 3745 3746 3747 3748 3749 3750 3751 3752 3753 3754 3755 3756 3757 3758 3759 3760 3761 3762 3763 3764 3765 3766 3767 3768 3769 3770 3771 3772 3773 3774 3775 3776 3777 3778 3779 3780 3781 3782 3783 3784 3785 3786 3787 3788 3789 3790 3791 3792

   Cases we'd like to look at:

   (1)
	if (test) goto over; // x not live
	x = a;
	goto label;
	over:

   becomes

	x = a;
	if (! test) goto label;

   (2)
	if (test) goto E; // x not live
	x = big();
	goto L;
	E:
	x = b;
	goto M;

   becomes

	x = b;
	if (test) goto M;
	x = big();
	goto L;

   (3) // This one's really only interesting for targets that can do
       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
       // it results in multiple branches on a cache line, which often
       // does not sit well with predictors.

	if (test1) goto E; // predicted not taken
	x = a;
	if (test2) goto F;
	...
	E:
	x = b;
	J:

   becomes

	x = a;
	if (test1) goto E;
	if (test2) goto F;

   Notes:

   (A) Don't do (2) if the branch is predicted against the block we're
   eliminating.  Do it anyway if we can eliminate a branch; this requires
   that the sole successor of the eliminated block postdominate the other
   side of the if.

   (B) With CE, on (3) we can steal from both sides of the if, creating

	if (test1) x = a;
	if (!test1) x = b;
	if (test1) goto J;
	if (test2) goto F;
	...
	J:

   Again, this is most useful if J postdominates.

   (C) CE substitutes for helpful life information.

   (D) These heuristics need a lot of work.  */

/* Tests for case 1 above.  */

static int
3793
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
Richard Henderson committed
3794 3795
{
  basic_block then_bb = then_edge->dest;
3796 3797
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
3798
  int then_bb_index;
Richard Henderson committed
3799

3800 3801
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
3802
     and cold sections.
3803

3804
     Basic block partitioning may result in some jumps that appear to
3805 3806 3807
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
3808 3809
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

3810
  if ((BB_END (then_bb)
3811 3812 3813 3814
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
3815
	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
3816
			    NULL_RTX)))
3817 3818
    return FALSE;

Richard Henderson committed
3819
  /* THEN has one successor.  */
3820
  if (!single_succ_p (then_bb))
Richard Henderson committed
3821 3822 3823
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
3824
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
Richard Henderson committed
3825 3826 3827
    return FALSE;

  /* THEN has one predecessor.  */
3828
  if (!single_pred_p (then_bb))
Richard Henderson committed
3829 3830
    return FALSE;

3831 3832
  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
Richard Henderson committed
3833 3834 3835
    return FALSE;

  num_possible_if_blocks++;
3836 3837
  if (dump_file)
    fprintf (dump_file,
Richard Henderson committed
3838
	     "\nIF-CASE-1 found, start %d, then %d\n",
3839
	     test_bb->index, then_bb->index);
Richard Henderson committed
3840 3841

  /* THEN is small.  */
3842 3843 3844
  if (! cheap_bb_rtx_cost_p (then_bb,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
				    predictable_edge_p (then_edge)))))
Richard Henderson committed
3845 3846 3847
    return FALSE;

  /* Registers set are dead, or are predicable.  */
3848
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
3849
			    single_succ (then_bb), 1))
Richard Henderson committed
3850 3851 3852 3853 3854
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

3855 3856 3857 3858 3859 3860 3861 3862 3863 3864 3865 3866 3867
  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     thru to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR)
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
3868 3869 3870 3871
					     else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);
3872

3873
  then_bb_index = then_bb->index;
3874
  delete_basic_block (then_bb);
3875

3876
  /* Make rest of code believe that the newly created block is the THEN_BB
3877
     block we removed.  */
3878
  if (new_bb)
3879
    {
3880
      df_bb_replace (then_bb_index, new_bb);
3881 3882 3883
      /* Since the fallthru edge was redirected from test_bb to new_bb,
         we need to ensure that new_bb is in the same partition as
         test bb (you can not fall through across section boundaries).  */
3884
      BB_COPY_PARTITION (new_bb, test_bb);
3885
    }
Richard Henderson committed
3886

3887
  num_true_changes++;
Richard Henderson committed
3888 3889 3890 3891 3892 3893 3894 3895
  num_updated_if_blocks++;

  return TRUE;
}

/* Test for case 2 above.  */

static int
3896
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
Richard Henderson committed
3897 3898 3899
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
3900
  edge else_succ;
3901
  rtx note;
Richard Henderson committed
3902

3903 3904
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
3905
     and cold sections.
3906

3907
     Basic block partitioning may result in some jumps that appear to
3908 3909 3910
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
3911 3912
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

3913 3914 3915 3916
  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
3917 3918
      || (BB_END (else_bb)
	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
3919
			    NULL_RTX)))
3920 3921
    return FALSE;

Richard Henderson committed
3922
  /* ELSE has one successor.  */
3923
  if (!single_succ_p (else_bb))
Richard Henderson committed
3924
    return FALSE;
3925
  else
3926
    else_succ = single_succ_edge (else_bb);
Richard Henderson committed
3927 3928 3929 3930 3931 3932

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
3933
  if (!single_pred_p (else_bb))
Richard Henderson committed
3934 3935
    return FALSE;

3936
  /* THEN is not EXIT.  */
3937
  if (then_bb->index < NUM_FIXED_BLOCKS)
3938 3939
    return FALSE;

Richard Henderson committed
3940
  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
3941
  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
Richard Henderson committed
3942 3943
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
    ;
3944
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
3945
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
3946
			      else_succ->dest))
Richard Henderson committed
3947 3948 3949 3950 3951
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
3952 3953
  if (dump_file)
    fprintf (dump_file,
Richard Henderson committed
3954
	     "\nIF-CASE-2 found, start %d, else %d\n",
3955
	     test_bb->index, else_bb->index);
Richard Henderson committed
3956 3957

  /* ELSE is small.  */
H.J. Lu committed
3958
  if (! cheap_bb_rtx_cost_p (else_bb,
3959 3960
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
				    predictable_edge_p (else_edge)))))
Richard Henderson committed
3961 3962 3963
    return FALSE;

  /* Registers set are dead, or are predicable.  */
3964
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
Richard Henderson committed
3965 3966 3967 3968 3969
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

3970 3971
  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
3972
  delete_basic_block (else_bb);
Richard Henderson committed
3973

3974
  num_true_changes++;
Richard Henderson committed
3975 3976 3977 3978 3979 3980 3981 3982 3983 3984 3985 3986 3987 3988 3989 3990 3991
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}

/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  NEW_DEST is the
   label TEST_BB should be branching to after the conversion.
   REVERSEP is true if the sense of the branch should be reversed.  */

static int
3992 3993
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, basic_block new_dest, int reversep)
Richard Henderson committed
3994
{
3995
  rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;
3996
  bitmap merge_set = NULL;
3997 3998
  /* Number of pending changes.  */
  int n_validated_changes = 0;
Richard Henderson committed
3999

4000
  jump = BB_END (test_bb);
Richard Henderson committed
4001 4002

  /* Find the extent of the real code in the merge block.  */
4003 4004
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);
Richard Henderson committed
4005

4006 4007 4008
  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

4009 4010 4011 4012 4013 4014 4015
  /* If merge_bb ends with a tablejump, predicating/moving insn's
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

4016
  if (LABEL_P (head))
Richard Henderson committed
4017
    head = NEXT_INSN (head);
4018 4019
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
4020
  if (NOTE_P (head))
Richard Henderson committed
4021 4022 4023 4024 4025 4026 4027
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      head = NEXT_INSN (head);
4028 4029
      while (DEBUG_INSN_P (head) && head != end)
	head = NEXT_INSN (head);
Richard Henderson committed
4030 4031
    }

4032
  if (JUMP_P (end))
Richard Henderson committed
4033 4034 4035 4036 4037 4038 4039
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      end = PREV_INSN (end);
4040 4041
      while (DEBUG_INSN_P (end) && end != head)
	end = PREV_INSN (end);
Richard Henderson committed
4042 4043
    }

4044 4045 4046
  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
4047
  if (targetm.have_conditional_execution ())
Richard Henderson committed
4048 4049
    {
      /* In the conditional execution case, we have things easy.  We know
4050 4051
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
Richard Henderson committed
4052 4053 4054
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

4055
      rtx cond, prob_val;
Richard Henderson committed
4056 4057

      cond = cond_exec_get_condition (jump);
4058 4059
      if (! cond)
	return FALSE;
4060 4061 4062 4063 4064

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
	prob_val = XEXP (prob_val, 0);

Richard Henderson committed
4065
      if (reversep)
4066
	{
4067 4068
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
4069
	    return FALSE;
4070
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
4071 4072 4073 4074
			         XEXP (cond, 1));
	  if (prob_val)
	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
	}
Richard Henderson committed
4075

4076 4077 4078 4079 4080
      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
	  && verify_changes (0))
	n_validated_changes = num_validated_changes ();
      else
	cancel_changes (0);
Richard Henderson committed
4081 4082 4083

      earliest = jump;
    }
4084
#endif
4085

4086 4087 4088 4089 4090 4091
  /* If we allocated new pseudos (e.g. in the conditional move
     expander called from noce_emit_cmove), we must resize the
     array first.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

4092 4093
  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
Richard Henderson committed
4094
    {
4095 4096 4097 4098
      rtx cond, insn;
      regset live;
      bool success;

Richard Henderson committed
4099 4100 4101 4102
      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

4103
      if (!any_condjump_p (jump))
Richard Henderson committed
4104 4105 4106
	return FALSE;

      /* Find the extent of the conditional.  */
4107
      cond = noce_get_condition (jump, &earliest, false);
4108
      if (!cond)
Richard Henderson committed
4109 4110
	return FALSE;

4111 4112 4113 4114 4115 4116 4117 4118
      live = BITMAP_ALLOC (&reg_obstack);
      simulate_backwards_to_point (merge_bb, live, end);
      success = can_move_insns_across (head, end, earliest, jump,
				       merge_bb, live,
				       df_get_live_in (other_bb), NULL);
      BITMAP_FREE (live);
      if (!success)
	return FALSE;
H.J. Lu committed
4119

4120
      /* Collect the set of registers set in MERGE_BB.  */
H.J. Lu committed
4121 4122 4123
      merge_set = BITMAP_ALLOC (&reg_obstack);

      FOR_BB_INSNS (merge_bb, insn)
4124 4125
	if (NONDEBUG_INSN_P (insn))
	  df_simulate_find_defs (insn, merge_set);
Richard Henderson committed
4126 4127 4128 4129 4130 4131 4132 4133
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
4134 4135 4136 4137 4138 4139 4140 4141
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
	  ? ! invert_jump_1 (jump, new_label)
	  : ! redirect_jump_1 (jump, new_label))
	goto cancel;
    }
Richard Henderson committed
4142

4143 4144 4145 4146
  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;
Richard Henderson committed
4147

4148
  if (other_bb != new_dest)
4149
    {
4150
      redirect_jump_2 (jump, old_dest, new_label, 0, reversep);
4151 4152 4153 4154 4155 4156 4157 4158 4159 4160 4161 4162 4163 4164

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  gcov_type count, probability;
	  count = BRANCH_EDGE (test_bb)->count;
	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
	  FALLTHRU_EDGE (test_bb)->count = count;
	  probability = BRANCH_EDGE (test_bb)->probability;
	  BRANCH_EDGE (test_bb)->probability
	    = FALLTHRU_EDGE (test_bb)->probability;
	  FALLTHRU_EDGE (test_bb)->probability = probability;
	  update_br_prob_note (test_bb);
	}
4165 4166
    }

Richard Henderson committed
4167 4168 4169
  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
4170 4171
      rtx insn;

4172 4173
      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);
4174

4175 4176
      /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
	 notes being moved might become invalid.  */
4177 4178 4179 4180 4181 4182 4183 4184 4185 4186 4187
      insn = head;
      do
	{
	  rtx note, set;

	  if (! INSN_P (insn))
	    continue;
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (! note)
	    continue;
	  set = single_set (insn);
4188 4189
	  if (!set || !function_invariant_p (SET_SRC (set))
	      || !function_invariant_p (XEXP (note, 0)))
4190 4191 4192
	    remove_note (insn, note);
	} while (insn != end && (insn = NEXT_INSN (insn)));

4193 4194 4195 4196 4197 4198 4199
      /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
	 notes referring to the registers being set might become invalid.  */
      if (merge_set)
	{
	  unsigned i;
	  bitmap_iterator bi;

4200
	  EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
4201 4202 4203 4204 4205
	    remove_reg_equal_equiv_notes_for_regno (i);

	  BITMAP_FREE (merge_set);
	}

Richard Henderson committed
4206 4207
      reorder_insns (head, end, PREV_INSN (earliest));
    }
4208 4209 4210 4211 4212 4213 4214 4215 4216 4217

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

Richard Henderson committed
4218 4219 4220 4221
  return TRUE;

 cancel:
  cancel_changes (0);
4222

4223
  if (merge_set)
4224 4225
    BITMAP_FREE (merge_set);

Richard Henderson committed
4226 4227 4228 4229 4230
  return FALSE;
}

/* Main entry point for all if-conversion.  */

4231
static void
4232
if_convert (void)
Richard Henderson committed
4233
{
4234
  basic_block bb;
4235
  int pass;
Richard Henderson committed
4236

4237 4238 4239 4240 4241 4242
  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

Richard Henderson committed
4243 4244
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
4245
  num_true_changes = 0;
Richard Henderson committed
4246

4247
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
4248 4249
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
4250
  free_dominance_info (CDI_DOMINATORS);
4251

4252 4253
  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
4254

4255
  df_set_flags (DF_LR_RUN_DCE);
Richard Henderson committed
4256

4257 4258 4259 4260 4261 4262
  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
4263 4264 4265
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
4266 4267 4268 4269
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
4270 4271
      if (dump_file && pass > 1)
	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
4272 4273 4274 4275
#endif

      FOR_EACH_BB (bb)
	{
4276
          basic_block new_bb;
H.J. Lu committed
4277
          while (!df_get_bb_dirty (bb)
4278 4279
                 && (new_bb = find_if_header (bb, pass)) != NULL)
            bb = new_bb;
4280 4281 4282
	}

#ifdef IFCVT_MULTIPLE_DUMPS
4283
      if (dump_file && cond_exec_changed_p)
4284 4285 4286 4287 4288 4289
	{
	  if (dump_flags & TDF_SLIM)
	    print_rtl_slim_with_bb (dump_file, get_insns (), dump_flags);
	  else
	    print_rtl_with_bb (dump_file, get_insns ());
	}
4290 4291 4292 4293 4294
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
4295 4296
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
4297
#endif
Richard Henderson committed
4298

4299
  free_dominance_info (CDI_POST_DOMINATORS);
Richard Henderson committed
4300

4301 4302
  if (dump_file)
    fflush (dump_file);
Richard Henderson committed
4303

4304 4305
  clear_aux_for_blocks ();

4306 4307 4308
  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();
Richard Henderson committed
4309 4310

  /* Write the final stats.  */
4311
  if (dump_file && num_possible_if_blocks > 0)
Richard Henderson committed
4312
    {
4313
      fprintf (dump_file,
Richard Henderson committed
4314 4315
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
4316
      fprintf (dump_file,
Richard Henderson committed
4317 4318
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
4319
      fprintf (dump_file,
4320 4321
	       "%d true changes made.\n\n\n",
	       num_true_changes);
Richard Henderson committed
4322 4323
    }

4324 4325 4326
  if (optimize == 1)
    df_remove_problem (df_live);

4327
#ifdef ENABLE_CHECKING
4328
  verify_flow_info ();
4329
#endif
Richard Henderson committed
4330
}
4331 4332 4333 4334

static bool
gate_handle_if_conversion (void)
{
4335 4336
  return (optimize > 0)
    && dbg_cnt (if_conversion);
4337 4338 4339
}

/* If-conversion and CFG cleanup.  */
4340
static unsigned int
4341 4342 4343 4344 4345
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      if (dump_file)
4346
        dump_flow_info (dump_file, dump_flags);
4347
      cleanup_cfg (CLEANUP_EXPENSIVE);
4348
      if_convert ();
4349 4350
    }

4351
  cleanup_cfg (0);
4352
  return 0;
4353 4354
}

4355
struct rtl_opt_pass pass_rtl_ifcvt =
4356
{
4357 4358
 {
  RTL_PASS,
4359 4360 4361 4362 4363 4364 4365 4366 4367 4368 4369
  "ce1",                                /* name */
  gate_handle_if_conversion,            /* gate */
  rest_of_handle_if_conversion,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
4370
  TODO_df_finish | TODO_verify_rtl_sharing |
4371 4372
  TODO_dump_func                        /* todo_flags_finish */
 }
4373 4374 4375 4376 4377
};

static bool
gate_handle_if_after_combine (void)
{
4378 4379
  return optimize > 0 && flag_if_conversion
    && dbg_cnt (if_after_combine);
4380 4381 4382 4383 4384
}


/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */
static unsigned int
rest_of_handle_if_after_combine (void)
{
  if_convert ();
  return 0;
}

4392
struct rtl_opt_pass pass_if_after_combine =
4393
{
4394 4395
 {
  RTL_PASS,
4396 4397 4398 4399 4400 4401 4402 4403 4404 4405 4406
  "ce2",                                /* name */
  gate_handle_if_after_combine,         /* gate */
  rest_of_handle_if_after_combine,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
4407
  TODO_df_finish | TODO_verify_rtl_sharing |
4408
  TODO_dump_func |
4409 4410
  TODO_ggc_collect                      /* todo_flags_finish */
 }
4411 4412 4413 4414 4415 4416
};


static bool
gate_handle_if_after_reload (void)
{
4417 4418
  return optimize > 0 && flag_if_conversion2
    && dbg_cnt (if_after_reload);
4419 4420
}

/* Rerun if-conversion after register allocation and reload.  */
static unsigned int
rest_of_handle_if_after_reload (void)
{
  if_convert ();
  return 0;
}


4429
struct rtl_opt_pass pass_if_after_reload =
4430
{
4431 4432
 {
  RTL_PASS,
4433 4434 4435 4436 4437 4438 4439 4440 4441 4442 4443
  "ce3",                                /* name */
  gate_handle_if_after_reload,          /* gate */
  rest_of_handle_if_after_reload,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT2,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
4444
  TODO_df_finish | TODO_verify_rtl_sharing |
4445
  TODO_dump_func |
4446 4447
  TODO_ggc_collect                      /* todo_flags_finish */
 }
4448
};