/* Nested function decomposition for GIMPLE.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "cgraph.h"
#include "expr.h"	/* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL.  */
#include "langhooks.h"
#include "pointer-set.h"


/* The object of this pass is to lower the representation of a set of nested
   functions in order to expose all of the gory details of the various
   nonlocal references.  We want to do this sooner rather than later, in
   order to give us more freedom in emitting all of the functions in question.

   Back in olden times, when gcc was young, we developed an insanely
   complicated scheme whereby variables which were referenced nonlocally
   were forced to live in the stack of the declaring function, and then
   the nested functions magically discovered where these variables were
   placed.  In order for this scheme to function properly, it required
   that the outer function be partially expanded, then we switch to
   compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   make it difficult to do whole translation unit optimizations
   involving such functions.

   The implementation here is much more direct.  Everything that can be
   referenced by an inner function is a member of an explicitly created
   structure herein called the "nonlocal frame struct".  The incoming
   static chain for a nested function is a pointer to this struct in
   the parent.  In this way, we settle on known offsets from a known
   base, and so are decoupled from the logic that places objects in the
   function's stack frame.  More importantly, we don't have to wait for
   that to happen -- since the compilation of the inner function is no
   longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere.  Which means that the outer function is now
   inlinable.

   Theory of operation here is very simple.  Iterate over all the
   statements in all the functions (depth first) several times,
   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making unnecessary
   changes to outer functions.

   The order of the passes matters a bit, in that later passes will be
   skipped if it is discovered that the functions don't actually interact
   at all.  That is, they're nested in the lexical sense but could have
   been written as independent functions without change.  */
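
/* As an illustrative sketch (the identifiers below are invented for
   exposition and are not produced literally by this pass), a nested pair
   such as

	int outer (int n)
	{
	  int x = n;
	  int inner (void) { return x + 1; }
	  return inner ();
	}

   is conceptually rewritten so that everything INNER touches lives in an
   explicit frame object belonging to the outer function:

	struct FRAME_outer { int x; };

	int outer (int n)
	{
	  struct FRAME_outer frame;
	  frame.x = n;
	  return inner (&frame);	<-- static chain passed explicitly
	}

	int inner (struct FRAME_outer *chain)
	{
	  return chain->x + 1;
	}

   In the code below the frame type is built by get_frame_type, its fields
   by lookup_field_for_decl, and the incoming chain pointer by
   get_chain_decl.  */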


struct nesting_info
{
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  struct pointer_map_t *field_map;
  struct pointer_map_t *var_map;
  struct pointer_set_t *mem_refs;
  bitmap suppress_expansion;

  tree context;
  tree new_local_var_chain;
  tree debug_var_chain;
  tree frame_type;
  tree frame_decl;
  tree chain_field;
  tree chain_decl;
  tree nl_goto_field;

  bool any_parm_remapped;
  bool any_tramp_created;
  char static_chain_added;
};


/* Iterate over the nesting tree, starting with ROOT, depth first.  */

static inline struct nesting_info *
iter_nestinfo_start (struct nesting_info *root)
{
  while (root->inner)
    root = root->inner;
  return root;
}

static inline struct nesting_info *
iter_nestinfo_next (struct nesting_info *node)
{
  if (node->next)
    return iter_nestinfo_start (node->next);
  return node->outer;
}

#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
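
/* For instance, given a hypothetical nesting in which A contains B and D
   and B contains C, this loop performs a post-order walk: C is visited
   before B, and both B and D are visited before A.  Every function is
   thus processed only after all of the functions nested within it, the
   "inner functions first" order described at the top of the file.  */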

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;


/* We're working in so many different function contexts simultaneously
   that create_tmp_var is dangerous.  Prevent mishap.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy

/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}

/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp, tree context)
{
  tree base = exp;
  tree save_context;
  tree retval;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Building the ADDR_EXPR will compute a set of properties for
     that ADDR_EXPR.  Those properties are unfortunately context
     specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.

     Temporarily set CURRENT_FUNCTION_DECL to the desired context,
     build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
     way the properties for the ADDR_EXPR are computed properly.  */
  save_context = current_function_decl;
  current_function_decl = context;
  retval = build_fold_addr_expr (exp);
  current_function_decl = save_context;
  return retval;
}

/* Insert FIELD into TYPE, sorted by alignment requirements.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    TYPE_ALIGN (type) = DECL_ALIGN (field);
}
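
/* For example, inserting fields whose DECL_ALIGNs are 32, 64 and 16 bits
   (in that order) into an empty frame type leaves them chained as 64, 32,
   16: the most strictly aligned members end up first, and TYPE_ALIGN of
   the frame type is bumped to 64.  */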

/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}

/* Return true if DECL should be referenced by pointer in the non-local
   frame structure.  */

static bool
use_pointer_in_frame (tree decl)
{
  if (TREE_CODE (decl) == PARM_DECL)
    {
      /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
         sized decls, and inefficient to copy large aggregates.  Don't bother
         moving anything but scalar variables.  */
      return AGGREGATE_TYPE_P (TREE_TYPE (decl));
    }
  else
    {
      /* Variable sized types make things "interesting" in the frame.  */
      return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
    }
}
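
/* As a sketch of the rule above (the declarations are invented for
   illustration): a PARM_DECL like "struct big buf" gets a "struct big *"
   field in the frame, so only its address is recorded, while a scalar
   parameter like "int n" is copied into the frame by value.  A local
   VAR_DECL whose DECL_SIZE is not constant, e.g. "char vla[n]", is
   likewise referenced through a pointer field rather than being placed
   in the frame itself.  */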

/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->field_map, decl);
      return slot ? (tree) *slot : NULL_TREE;
    }

  slot = pointer_map_insert (info->field_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
          TREE_TYPE (field) = TREE_TYPE (decl);
          DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
          DECL_ALIGN (field) = DECL_ALIGN (decl);
          DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
          TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
          DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
          TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return (tree) *slot;
}

/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is closer
	 to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
          && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}

/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
          && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}

/* Initialize a new temporary with the GIMPLE_CALL STMT.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
		        gimple call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}


/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  */

static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}


/* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */

static tree
gsi_gimplify_val (struct nesting_info *info, tree exp,
		  gimple_stmt_iterator *gsi)
{
  if (is_gimple_val (exp))
    return exp;
  else
    return init_tmp_var (info, exp, gsi);
}

/* Similarly, but copy from the temporary and insert the statement
   after the iterator.  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}

/* Build or return the type used to represent a nested function trampoline.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
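
/* A worked example of the sizing above, for a hypothetical target with
   TRAMPOLINE_SIZE 24, TRAMPOLINE_ALIGNMENT 128 bits and STACK_BOUNDARY
   64 bits: since 128 > 64 we cannot rely on TYPE_ALIGN alone, so the
   size grows by (128/8 - 1) & -(64/8) = 8 bytes of slack for run-time
   realignment and the declared alignment drops to the 64-bit stack
   boundary, giving roughly

	struct __builtin_trampoline { char __data[32]; };

   The type is built once and shared by every trampoline field.  */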

/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->var_map, decl);
      return slot ? (tree) *slot : NULL_TREE;
    }

  slot = pointer_map_insert (info->var_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      info->any_tramp_created = true;
    }

  return (tree) *slot;
}

/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
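
/* For example, on a hypothetical 64-bit target where
   STACK_SAVEAREA_MODE (SAVE_NONLOCAL) is a 16-byte mode and Pmode is
   8 bytes wide, the computation above gives size = 16/8 + 1 = 3 words
   (frame pointer plus stack save area); note that
   build_index_type (size_int (size)) yields indices 0..size, i.e. an
   array of size + 1 pointer-sized words.  */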

/* Invoke CALLBACK on all statements of GIMPLE sequence SEQ.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq seq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.val_only = true;
  walk_gimple_seq (seq, callback_stmt, callback_op, &wi);
}


/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */

static inline void
walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	       struct nesting_info *info)
{
  walk_body (callback_stmt, callback_op, info, gimple_body (info->context));
}

/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gimple for_stmt,
    		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
    		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body (for_stmt));

  seq = gimple_seq_alloc ();
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  if (gimple_seq_empty_p (seq))
    gimple_seq_free (seq);
  else
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}

/* Similarly for ROOT and all functions nested underneath, depth first.  */

static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		    struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    walk_function (callback_stmt, callback_op, n);
}


/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FN is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  tree arg;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
	return true;
    }

  return false;
}
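
/* A hypothetical illustration of the pathological case described above:

	void outer (int n)
	{
	  void inner (int arr[n][n]) { ... }
	  ...
	}

   The type of INNER's parameter depends on OUTER's parameter N, so
   inlining OUTER would also require remapping N inside INNER's signature;
   since we do not do that, OUTER is marked DECL_UNINLINABLE instead.  */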

/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = pointer_map_create ();
  info->var_map = pointer_map_create ();
  info->mem_refs = pointer_set_create ();
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See the discussion at check_for_nested_with_variably_modified
     for why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}

/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before TSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
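
/* As a sketch: when INFO->CONTEXT is nested two levels below
   TARGET_CONTEXT, the loop above emits a single statement of the form

	tmp = CHAIN->__chain;

   and returns TMP; one such load is generated per intervening nesting
   level, each one going through the __chain field built by
   get_chain_field.  */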


/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref (x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}

static void note_nonlocal_vla_type (struct nesting_info *info, tree type);

/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);

  if (*slot)
    return (tree) *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
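
/* Continuing the sketch: for a variable X declared one level up, the
   debug decl built above is a local VAR_DECL also named X whose
   DECL_VALUE_EXPR is CHAIN->x (or *CHAIN->x when use_pointer_in_frame
   holds), so debug info and the OpenMP lowering can keep referring to
   the variable by its original name.  */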


/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
        FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
          wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}

static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
					     struct walk_stmt_info *);

/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
	                                 &dummy, wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}

/* Create nonlocal debug decls for nonlocal VLA array bounds.  */

static void
note_nonlocal_vla_type (struct nesting_info *info, tree type)
{
  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
    type = TREE_TYPE (type);

  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == VECTOR_TYPE
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE)
    type = TREE_TYPE (type);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree domain, t;

      note_nonlocal_vla_type (info, TREE_TYPE (type));
      domain = TYPE_DOMAIN (type);
      if (domain)
	{
	  t = TYPE_MIN_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	  t = TYPE_MAX_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	}
    }
}

/* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
   in BLOCK.  */

static void
note_nonlocal_block_vlas (struct nesting_info *info, tree block)
{
  tree var;

  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL
	&& variably_modified_type_p (TREE_TYPE (var), NULL)
	&& DECL_HAS_VALUE_EXPR_P (var)
	&& decl_function_context (var) != info->context)
      note_nonlocal_vla_type (info, TREE_TYPE (var));
}

/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                                wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
	  		   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
	  	 convert_nonlocal_reference_op, info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));
      break;

    case GIMPLE_BIND:
      if (!optimize && gimple_bind_block (stmt))
	note_nonlocal_block_vlas (info, gimple_bind_block (stmt));

      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}


/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OpenMP
   lowering.  */

static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);
  if (*slot)
    return (tree) *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}


/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	pointer_set_insert (info->mem_refs, tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}

static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
					  struct walk_stmt_info *);

/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}


/* Called via walk_function+walk_gimple_stmt, rewrite all references to
   VAR_DECLs and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */
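/* For illustration (variable names here are examples only): if a local
   variable "v" of this function is also used by a nested function, then a
   use of "v" in this function's body becomes, roughly,

     FRAME.v

   where FRAME is the local instance of the nonlocal frame struct.  */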

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                             wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (stmt, convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      break;

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}


/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */
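/* For illustration (names are examples only): given GNU C source like

     void outer (void)
     {
       __label__ l;
       void inner (void) { goto l; }
       ...
     l:;
     }

   the "goto l" inside "inner" is rewritten to roughly

     __builtin_nonlocal_goto (&NEW_L, &CHAIN->nl_goto_field);

   where NEW_L is the DECL_NONLOCAL receiver label recorded in var_map and
   later installed by convert_nl_goto_receiver.  */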

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  void **slot;
  gimple call;
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked DECL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  slot = pointer_map_insert (i->var_map, label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = (tree) *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, &wi->gsi);
  x = build_addr (x, target_context);
  x = gsi_gimplify_val (info, x, &wi->gsi);
  call = gimple_build_call (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
			    build_addr (new_label, target_context), x);
  gsi_replace (&wi->gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}


/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_LABELs whose
   labels are referenced via nonlocal goto from a nested function.  The
   rewrite will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */
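/* For illustration (label names are examples only): for an original label

     l:

   recorded in var_map, the rewritten IL looks roughly like

     goto l;	<-- only if the previous statement may fall through
     NEW_L:	<-- the new DECL_NONLOCAL receiver label
     l:

   so that normal control flow branches around the nonlocal receiver.  */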

static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  void **slot;
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_LABEL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  slot = pointer_map_contains (info->var_map, label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      gimple stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}


/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */
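/* For illustration (names are examples only): taking the address of a
   nested function "inner" that uses the static chain, e.g.

     int (*fp) (int) = inner;

   is rewritten so that "fp" receives the (adjusted) address of a trampoline
   stored in the parent's frame, as built below; the trampoline itself is
   initialized later in finalize_nesting_tree_1.  */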

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gimple call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Look up the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}


/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);

	*handled_ops_p = true;
	return NULL_TREE;
      }

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}



/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */
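/* For illustration (names are examples only): a direct call such as

     inner (x);

   where "inner" is a nested function using its static chain, becomes a
   call carrying an explicit static chain operand, printed in dumps
   roughly as

     inner (x) [static-chain: &FRAME];  */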

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (stmt, get_static_chain (info, target_context,
							 &wi->gsi));
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
	  	 gimple_omp_for_pre_body (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}

/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      tree decl = n->context;
      if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it had been,
     we would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}

struct nesting_copy_body_data
{
  copy_body_data cb;
  struct nesting_info *root;
};

/* A helper subroutine for debug_var_chain type remapping.  */

static tree
nesting_copy_decl (tree decl, copy_body_data *id)
{
  struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
  void **slot = pointer_map_contains (nid->root->var_map, decl);

  if (slot)
    return (tree) *slot;

  if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
    {
      tree new_decl = copy_decl_no_change (decl, id);
      DECL_ORIGINAL_TYPE (new_decl)
	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
      return new_decl;
    }

  if (TREE_CODE (decl) == VAR_DECL
      || TREE_CODE (decl) == PARM_DECL
      || TREE_CODE (decl) == RESULT_DECL)
    return decl;

  return copy_decl_no_change (decl, id);
}

/* A helper function for remap_vla_decls.  See if *TP contains
   some remapped variables.  */

static tree
contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
{
  struct nesting_info *root = (struct nesting_info *) data;
  tree t = *tp;
  void **slot;

  if (DECL_P (t))
    {
      *walk_subtrees = 0;
      slot = pointer_map_contains (root->var_map, t);

      if (slot)
	return (tree) *slot;
    }
  return NULL;
}

/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (pointer_map_contains (root->var_map, TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = pointer_map_create ();
  id.root = root;

  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;
	void **slot;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	slot = pointer_map_contains (root->var_map, TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   that are referenced only from value expressions and thus could be
	   swept away during GC.  */
        if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  pointer_map_destroy (id.cb.decl_map);
}

/* Fold the MEM_REF *E.  */
static bool
fold_mem_refs (const void *e, void *data ATTRIBUTE_UNUSED)
{
  tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *) e);
  *ref_p = fold (*ref_p);
  return true;
}

/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */
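/* For illustration (names are examples only), the initialization sequence
   emitted at the start of the function looks roughly like

     FRAME.p = p;			<-- nonlocally referenced parameters
     FRAME.chain = CHAIN;		<-- static chain field, if any
     __builtin_init_trampoline (&FRAME.tramp, &inner, &FRAME);

   followed by the original function body.  */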

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	      gimple_assign_set_rhs1 (stmt, x);
	    }
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gimple bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gimple scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = pointer_map_create ();
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  pointer_map_destroy (id.cb.decl_map);
	}

      scope = gimple_seq_first_stmt (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  pointer_set_traverse (root->mem_refs, fold_mem_refs, NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}

static void
finalize_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    finalize_nesting_tree_1 (n);
}

/* Unnest the nodes and pass them to cgraph.  */

static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
  struct cgraph_node *node = cgraph_get_node (root->context);

  /* For nested functions update the cgraph to reflect unnesting.
     We also delay finalizing of these functions up to this point.  */
  if (node->origin)
    {
       cgraph_unnest_node (node);
       cgraph_finalize_function (root->context, true);
    }
}

static void
unnest_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    unnest_nesting_tree_1 (n);
}

/* Free the data structures allocated during this pass.  */

static void
free_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *node, *next;

  node = iter_nestinfo_start (root);
  do
    {
      next = iter_nestinfo_next (node);
      pointer_map_destroy (node->var_map);
      pointer_map_destroy (node->field_map);
      pointer_set_destroy (node->mem_refs);
      free (node);
      node = next;
    }
  while (node);
}

/* Gimplify a function and all its nested functions.  */
static void
gimplify_all_functions (struct cgraph_node *root)
{
  struct cgraph_node *iter;
  if (!gimple_body (root->decl))
    gimplify_function_tree (root->decl);
  for (iter = root->nested; iter; iter = iter->next_nested)
    gimplify_all_functions (iter);
}

/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_get_node (fndecl);
  if (!cgn->nested)
    return;

  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  walk_all_functions (convert_nonlocal_reference_stmt,
                      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
                      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}

#include "gt-tree-nested.h"