/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
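
/* For illustration only (a hypothetical sketch, not actual compiler
   output), a function with a nested scope and two identical returns

	int f (int x)
	{
	  {
	    int y = x + 1;
	    if (y > 10)
	      return 1;
	  }
	  return 1;
	}

   is lowered roughly to

	int f (int x)
	{
	  int y;

	  y = x + 1;
	  if (y > 10) goto <D3>; else goto <D4>;
	  <D3>:;
	  goto <D5>;
	  <D4>:;
	  <D5>:;
	  return 1;
	}

   i.e. the inner GIMPLE_BIND is flattened into the function body (point 1)
   and the duplicated return is funneled through a shared label in front of
   the single representative return (point 3).  */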

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate; simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
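	  /* In short: cannot_fallthru (try-finally)
	     = cannot_fallthru (try) || cannot_fallthru (finally),
	     which is what the statements below compute.  */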
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
                            as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}


/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code. If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */
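
/* Schematically (an illustrative sketch, not actual compiler output),
   with the two receivers <D1847> and <D2847> from the dump above:

	calls that may longjmp  --abnormal-->  ABNORMAL_DISPATCHER
	ABNORMAL_DISPATCHER     --abnormal-->  <D1847>, <D2847>

   so the number of abnormal edges stays linear in the number of setjmp
   sites instead of quadratic.  */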

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
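
  /* If the first argument is literally '&ptr', have posix_memalign fill
     an addressable temporary TEM and copy TEM into PTR afterwards;
     otherwise load the pointer stored at '*pptr' into PTR.  */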
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
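
  /* Branch on the return value: only on the RES == 0 path reload the
     pointer, pass it through __builtin_assume_aligned and store it back,
     so CCP can see the guaranteed alignment.  */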
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				   align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}