cgraphclones.c 39.7 KB
Newer Older
1
/* Callgraph clones
2
   Copyright (C) 2003-2019 Free Software Foundation, Inc.
3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module provides facilities for cloning functions, i.e. creating
   new functions based on existing functions with simple modifications,
   such as replacement of parameters.

   To allow whole program optimization without actual presence of function
   bodies, an additional infrastructure is provided for so-called virtual
   clones

   A virtual clone in the callgraph is a function that has no
   associated body, just a description of how to create its body based
   on a different function (which itself may be a virtual clone).

   The description of function modifications includes adjustments to
   the function's signature (which allows, for example, removing or
   adding function arguments), substitutions to perform on the
   function body, and, for inlined functions, a pointer to the
   function that it will be inlined into.

   It is also possible to redirect any edge of the callgraph from a
   function to its virtual clone.  This implies updating of the call
   site to adjust for the new function signature.

   Most of the transformations performed by inter-procedural
   optimizations can be represented via virtual clones.  For
   instance, a constant propagation pass can produce a virtual clone
   of the function which replaces one of its arguments by a
   constant.  The inliner can represent its decisions by producing a
   clone of a function whose body will be later integrated into
   a given function.

   Using virtual clones, the program can be easily updated
   during the Execute stage, solving most of pass interactions
   problems that would otherwise occur during Transform.

   Virtual clones are later materialized in the LTRANS stage and
   turned into real functions.  Passes executed after the virtual
   clone were introduced also perform their Transform stage
   on new functions, so for a pass there is no significant
   difference between operating on a real function or a virtual
   clone introduced before its Execute stage.

   Optimization passes then work on virtual clones introduced before
   their Execute stage as if they were real functions.  The
   only difference is that clones are not visible during the
   Generate Summary stage.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
70
#include "backend.h"
71 72
#include "target.h"
#include "rtl.h"
73 74
#include "tree.h"
#include "gimple.h"
75
#include "stringpool.h"
76 77
#include "cgraph.h"
#include "lto-streamer.h"
78
#include "tree-eh.h"
79
#include "tree-cfg.h"
80
#include "tree-inline.h"
81
#include "dumpfile.h"
82 83
#include "gimple-pretty-print.h"

Martin Liska committed
84 85 86 87
/* Create clone of edge in the node N represented by CALL_EXPR
   the callgraph.  */

cgraph_edge *
88
cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
89
		    profile_count num, profile_count den,
90
		    bool update_original)
91
{
Martin Liska committed
92
  cgraph_edge *new_edge;
93
  profile_count::adjust_for_ipa_scaling (&num, &den);
94
  profile_count prof_count = count.apply_scale (num, den);
95

Martin Liska committed
96
  if (indirect_unknown_callee)
97 98 99
    {
      tree decl;

100 101 102
      if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
	  /* When the call is speculative, we need to resolve it 
	     via cgraph_resolve_speculation and not here.  */
Martin Liska committed
103
	  && !speculative)
104
	{
Martin Liska committed
105
	  cgraph_node *callee = cgraph_node::get (decl);
106
	  gcc_checking_assert (callee);
107
	  new_edge = n->create_edge (callee, call_stmt, prof_count);
108 109 110
	}
      else
	{
Martin Liska committed
111
	  new_edge = n->create_indirect_edge (call_stmt,
Martin Liska committed
112
					      indirect_info->ecf_flags,
113
					      prof_count, false);
Martin Liska committed
114
	  *new_edge->indirect_info = *indirect_info;
115 116 117 118
	}
    }
  else
    {
119
      new_edge = n->create_edge (callee, call_stmt, prof_count);
Martin Liska committed
120
      if (indirect_info)
121 122
	{
	  new_edge->indirect_info
123
	    = ggc_cleared_alloc<cgraph_indirect_call_info> ();
Martin Liska committed
124
	  *new_edge->indirect_info = *indirect_info;
125 126 127
	}
    }

Martin Liska committed
128 129
  new_edge->inline_failed = inline_failed;
  new_edge->indirect_inlining_edge = indirect_inlining_edge;
130 131
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
Martin Liska committed
132 133 134
  new_edge->can_throw_external = can_throw_external;
  new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
  new_edge->speculative = speculative;
135
  new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;
136 137

  /* Update IPA profile.  Local profiles need no updating in original.  */
138 139 140
  if (update_original)
    count = count.combine_with_ipa_count (count.ipa () 
					  - new_edge->count.ipa ());
Martin Liska committed
141
  symtab->call_edge_duplication_hooks (this, new_edge);
142 143 144
  return new_edge;
}

145 146 147
/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.  */

148 149 150
tree
cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
				      bool skip_return)
151 152
{
  tree new_type = NULL;
153
  tree args, new_args = NULL;
154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214
  tree new_reversed;
  int i = 0;

  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      if (new_reversed)
        TREE_CHAIN (new_args) = void_list_node;
      else
	new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  return new_type;
}

/* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.

   Arguments from DECL_ARGUMENTS list can't be removed now, since they are
   linked by TREE_CHAIN directly.  The caller is responsible for eliminating
   them when they are being duplicated (i.e. copy_arguments_for_versioning).  */

static tree
build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
			       bool skip_return)
{
  tree new_decl = copy_node (orig_decl);
  tree new_type;

  new_type = TREE_TYPE (orig_decl);
  if (prototype_p (new_type)
      || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
    new_type
      = cgraph_build_function_type_skip_args (new_type, args_to_skip,
					      skip_return);
  TREE_TYPE (new_decl) = new_type;

  /* For declarations setting DECL_VINDEX (i.e. methods)
     we expect first argument to be THIS pointer.   */
  if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When signature changes, we need to clear builtin info.  */
  if (fndecl_built_in_p (new_decl)
      && args_to_skip
      && !bitmap_empty_p (args_to_skip))
    {
      DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
    }
  /* The FE might have information and assumptions about the other
     arguments.  */
  DECL_LANG_SPECIFIC (new_decl) = NULL;
  return new_decl;
}

/* Set flags of NEW_NODE and its decl.  NEW_NODE is a newly created private
   clone or its thunk: strip all external/public visibility from the decl so
   the clone stays local to this translation unit.  */

static void
set_new_clone_decl_and_node_flags (cgraph_node *new_node)
{
  DECL_EXTERNAL (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  DECL_VIRTUAL_P (new_node->decl) = 0;
  DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;

  new_node->externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
}

/* Duplicate thunk THUNK if necessary but make it to refer to NODE.
   ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
   Function can return NODE if no thunk is necessary, which can happen when
   thunk is this_adjusting but we are removing this parameter.  */

static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
{
  cgraph_node *new_thunk, *thunk_of;
  thunk_of = thunk->callees->callee->ultimate_alias_target ();

  /* Chains of thunks are duplicated bottom-up.  */
  if (thunk_of->thunk.thunk_p)
    node = duplicate_thunk_for_node (thunk_of, node);

  if (!DECL_ARGUMENTS (thunk->decl))
    thunk->get_untransformed_body ();

  /* If an equivalent thunk for NODE already exists, reuse it.  */
  cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
	&& cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
	&& cs->caller->thunk.virtual_value == thunk->thunk.virtual_value
	&& cs->caller->thunk.indirect_offset == thunk->thunk.indirect_offset
	&& cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
	&& cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p)
      return cs->caller;

  tree new_decl;
  if (!node->clone.args_to_skip)
    new_decl = copy_node (thunk->decl);
  else
    {
      /* We do not need to duplicate this_adjusting thunks if we have removed
	 this.  */
      if (thunk->thunk.this_adjusting
	  && bitmap_bit_p (node->clone.args_to_skip, 0))
	return node;

      new_decl = build_function_decl_skip_args (thunk->decl,
						node->clone.args_to_skip,
						false);
    }

  /* Copy the kept parameters into the new decl's DECL_ARGUMENTS chain.  */
  tree *link = &DECL_ARGUMENTS (new_decl);
  int i = 0;
  for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
    {
      if (!node->clone.args_to_skip
	  || !bitmap_bit_p (node->clone.args_to_skip, i))
	{
	  tree nd = copy_node (pd);
	  DECL_CONTEXT (nd) = new_decl;
	  *link = nd;
	  link = &DECL_CHAIN (nd);
	}
    }
  *link = NULL_TREE;

  gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
  gcc_checking_assert (!DECL_INITIAL (new_decl));
  gcc_checking_assert (!DECL_RESULT (new_decl));
  gcc_checking_assert (!DECL_RTL_SET_P (new_decl));

  DECL_NAME (new_decl) = clone_function_name_numbered (thunk->decl,
						       "artificial_thunk");
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));

  /* We need to force DECL_IGNORED_P because the new thunk is created after
     early debug was run.  */
  DECL_IGNORED_P (new_decl) = 1;

  new_thunk = cgraph_node::create (new_decl);
  set_new_clone_decl_and_node_flags (new_thunk);
  new_thunk->definition = true;
  new_thunk->local.can_change_signature = node->local.can_change_signature;
  new_thunk->thunk = thunk->thunk;
  new_thunk->unique_name = in_lto_p;
  new_thunk->former_clone_of = thunk->decl;
  new_thunk->clone.args_to_skip = node->clone.args_to_skip;
  new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;

  cgraph_edge *e = new_thunk->create_edge (node, NULL, new_thunk->count);
  symtab->call_edge_duplication_hooks (thunk->callees, e);
  symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
  return new_thunk;
}

/* If E does not lead to a thunk, simply redirect it to N.  Otherwise create
   one or more equivalent thunks for N and redirect E to the first in the
346 347
   chain.  Note that it is then necessary to call
   n->expand_all_artificial_thunks once all callers are redirected.  */
348 349

void
350
cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
351
{
352
  cgraph_node *orig_to = callee->ultimate_alias_target ();
353
  if (orig_to->thunk.thunk_p)
354
    n = duplicate_thunk_for_node (orig_to, n);
355

356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380
  redirect_callee (n);
}

/* Call expand_thunk on all callers that are thunks and if analyze those nodes
   that were expanded.  Recurses so whole chains of artificial thunks get
   expanded.  */

void
cgraph_node::expand_all_artificial_thunks ()
{
  cgraph_edge *e;
  for (e = callers; e;)
    if (e->caller->thunk.thunk_p)
      {
	cgraph_node *thunk = e->caller;

	/* Advance before expanding; expansion may rewrite the edge.  */
	e = e->next_caller;
	if (thunk->expand_thunk (false, false))
	  {
	    thunk->thunk.thunk_p = false;
	    thunk->analyze ();
	  }
	thunk->expand_all_artificial_thunks ();
      }
    else
      e = e->next_caller;
}
382

383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404
void
dump_callgraph_transformation (const cgraph_node *original,
			       const cgraph_node *clone,
			       const char *suffix)
{
  if (symtab->ipa_clones_dump_file)
    {
      fprintf (symtab->ipa_clones_dump_file,
	       "Callgraph clone;%s;%d;%s;%d;%d;%s;%d;%s;%d;%d;%s\n",
	       original->asm_name (), original->order,
	       DECL_SOURCE_FILE (original->decl),
	       DECL_SOURCE_LINE (original->decl),
	       DECL_SOURCE_COLUMN (original->decl), clone->asm_name (),
	       clone->order, DECL_SOURCE_FILE (clone->decl),
	       DECL_SOURCE_LINE (clone->decl), DECL_SOURCE_COLUMN (clone->decl),
	       suffix);

      symtab->cloned_nodes.add (original);
      symtab->cloned_nodes.add (clone);
    }
}

405 406 407 408 409 410 411 412 413
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from original node too.
   The new clone will have decl set to DECL that may or may not be the same
   as decl of N.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of execution is handled
   by node.  
   When CALL_DUPLICATOIN_HOOK is true, the ipa passes are acknowledged about
414 415 416 417 418 419
   the new clone. Otherwise the caller is responsible for doing so later.

   If the new node is being inlined into another one, NEW_INLINED_TO should be
   the outline function the new one is (even indirectly) inlined to.  All hooks
   will see this in node's global.inlined_to, when invoked.  Can be NULL if the
   node is not inlined.  */
420

Martin Liska committed
421
cgraph_node *
422
cgraph_node::create_clone (tree new_decl, profile_count prof_count,
Martin Liska committed
423 424 425
			   bool update_original,
			   vec<cgraph_edge *> redirect_callers,
			   bool call_duplication_hook,
Martin Liska committed
426
			   cgraph_node *new_inlined_to,
427
			   bitmap args_to_skip, const char *suffix)
428
{
Martin Liska committed
429 430
  cgraph_node *new_node = symtab->create_empty ();
  cgraph_edge *e;
431
  unsigned i;
432
  profile_count old_count = count;
433

434 435 436
  if (new_inlined_to)
    dump_callgraph_transformation (this, new_inlined_to, "inlining to");

437 438 439 440
  /* When inlining we scale precisely to prof_count, when cloning we can
     preserve local profile.  */
  if (!new_inlined_to)
    prof_count = count.combine_with_ipa_count (prof_count);
441
  new_node->count = prof_count;
442 443

  /* Update IPA profile.  Local profiles need no updating in original.  */
444 445
  if (update_original)
    count = count.combine_with_ipa_count (count.ipa () - prof_count.ipa ());
446
  new_node->decl = new_decl;
Martin Liska committed
447 448 449
  new_node->register_symbol ();
  new_node->origin = origin;
  new_node->lto_file_data = lto_file_data;
450 451 452 453 454
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
Martin Liska committed
455 456 457
  new_node->analyzed = analyzed;
  new_node->definition = definition;
  new_node->local = local;
458
  new_node->externally_visible = false;
459
  new_node->no_reorder = no_reorder;
460
  new_node->local.local = true;
Martin Liska committed
461
  new_node->global = global;
462
  new_node->global.inlined_to = new_inlined_to;
Martin Liska committed
463 464 465
  new_node->rtl = rtl;
  new_node->frequency = frequency;
  new_node->tp_first_run = tp_first_run;
466
  new_node->tm_clone = tm_clone;
467
  new_node->icf_merged = icf_merged;
468
  new_node->merged_comdat = merged_comdat;
469
  new_node->thunk = thunk;
470 471 472

  new_node->clone.tree_map = NULL;
  new_node->clone.args_to_skip = args_to_skip;
473
  new_node->split_part = split_part;
474
  if (!args_to_skip)
Martin Liska committed
475 476
    new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
  else if (clone.combined_args_to_skip)
477 478 479
    {
      new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
      bitmap_ior (new_node->clone.combined_args_to_skip,
Martin Liska committed
480
		  clone.combined_args_to_skip, args_to_skip);
481 482 483 484
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;

485
  FOR_EACH_VEC_ELT (redirect_callers, i, e)
486 487
    {
      /* Redirect calls to the old version node to point to its new
488 489 490
	 version.  The only exception is when the edge was proved to
	 be unreachable during the clonning procedure.  */
      if (!e->callee
491
	  || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
492
        e->redirect_callee_duplicating_thunks (new_node);
493
    }
494
  new_node->expand_all_artificial_thunks ();
495

Martin Liska committed
496
  for (e = callees;e; e=e->next_callee)
497 498
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid, new_node->count, old_count,
	      update_original);
499

Martin Liska committed
500
  for (e = indirect_calls; e; e = e->next_callee)
Martin Liska committed
501
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
502
	      new_node->count, old_count, update_original);
Martin Liska committed
503
  new_node->clone_references (this);
504

Martin Liska committed
505 506 507 508 509
  new_node->next_sibling_clone = clones;
  if (clones)
    clones->prev_sibling_clone = new_node;
  clones = new_node;
  new_node->clone_of = this;
510 511

  if (call_duplication_hook)
Martin Liska committed
512
    symtab->call_cgraph_duplication_hooks (this, new_node);
513 514 515 516

  if (!new_inlined_to)
    dump_callgraph_transformation (this, new_node, suffix);

517 518 519
  return new_node;
}

520
static GTY(()) hash_map<const char *, unsigned> *clone_fn_ids;
521

522 523 524 525 526 527
/* Return a new assembler name for a clone of decl named NAME.  Apart
   from the string SUFFIX, the new name will end with a unique (for
   each NAME) unspecified number.  If clone numbering is not needed
   then the two argument clone_function_name should be used instead.
   Should not be called directly except for by
   lto-partition.c:privatize_symbol_name_1.  */
528

529
tree
530 531
clone_function_name_numbered (const char *name, const char *suffix)
{
532 533 534 535 536 537 538
  /* Initialize the function->counter mapping the first time it's
     needed.  */
  if (!clone_fn_ids)
    clone_fn_ids = hash_map<const char *, unsigned int>::create_ggc (64);
  unsigned int &suffix_counter = clone_fn_ids->get_or_insert (
				   IDENTIFIER_POINTER (get_identifier (name)));
  return clone_function_name (name, suffix, suffix_counter++);
539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562
}

/* Return a new assembler name for a clone of DECL.  Apart from string
   SUFFIX, the new name will end with a unique (for each DECL
   assembler name) unspecified number.  If clone numbering is not
   needed then the two argument clone_function_name should be used
   instead.  */

tree
clone_function_name_numbered (tree decl, const char *suffix)
{
  tree name = DECL_ASSEMBLER_NAME (decl);
  return clone_function_name_numbered (IDENTIFIER_POINTER (name),
				       suffix);
}

/* Return a new assembler name for a clone of decl named NAME.  Apart
   from the string SUFFIX, the new name will end with the specified
   NUMBER.  If clone numbering is not needed then the two argument
   clone_function_name should be used instead.  */

tree
clone_function_name (const char *name, const char *suffix,
		     unsigned long number)
{
  size_t len = strlen (name);
  char *tmp_name, *prefix;

  /* Layout: NAME, one separator character, SUFFIX, NUL — hence +2.  */
  prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (prefix, name, len);
  strcpy (prefix + len + 1, suffix);
  prefix[len] = symbol_table::symbol_suffix_separator ();
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, number);
  return get_identifier (tmp_name);
}

575 576 577 578 579 580 581 582 583 584 585 586 587
/* Return a new assembler name for a clone of DECL.  Apart from the
   string SUFFIX, the new name will end with the specified NUMBER.  If
   clone numbering is not needed then the two argument
   clone_function_name should be used instead.  */

tree
clone_function_name (tree decl, const char *suffix,
		     unsigned long number)
{
  return clone_function_name (
	   IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), suffix, number);
}

588 589
/* Return a new assembler name ending with the string SUFFIX for a
   clone of DECL.  */
590 591 592 593

tree
clone_function_name (tree decl, const char *suffix)
{
594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611
  tree identifier = DECL_ASSEMBLER_NAME (decl);
  /* For consistency this needs to behave the same way as
     ASM_FORMAT_PRIVATE_NAME does, but without the final number
     suffix.  */
  char *separator = XALLOCAVEC (char, 2);
  separator[0] = symbol_table::symbol_suffix_separator ();
  separator[1] = 0;
#if defined (NO_DOT_IN_LABEL) && defined (NO_DOLLAR_IN_LABEL)
  const char *prefix = "__";
#else
  const char *prefix = "";
#endif
  char *result = ACONCAT ((prefix,
			   IDENTIFIER_POINTER (identifier),
			   separator,
			   suffix,
			   (char*)0));
  return get_identifier (result);
612 613 614
}


615 616 617
/* Create callgraph node clone with new declaration.  The actual body will be
   copied later at compilation stage.  The name of the new clone will be
   constructed from the name of the original node, SUFFIX and NUM_SUFFIX.
618 619 620 621

   TODO: after merging in ipa-sra use function call notes instead of args_to_skip
   bitmap interface.
   */
Martin Liska committed
622
cgraph_node *
Martin Liska committed
623 624
cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
				   vec<ipa_replace_map *, va_gc> *tree_map,
625 626
				   bitmap args_to_skip, const char * suffix,
				   unsigned num_suffix)
627
{
Martin Liska committed
628
  tree old_decl = decl;
Martin Liska committed
629
  cgraph_node *new_node = NULL;
630
  tree new_decl;
631
  size_t len, i;
Martin Liska committed
632
  ipa_replace_map *map;
633
  char *name;
634

635
  gcc_checking_assert (local.versionable);
Martin Liska committed
636
  gcc_assert (local.can_change_signature || !args_to_skip);
637 638 639 640 641 642

  /* Make a new FUNCTION_DECL tree node */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);
643 644 645 646

  /* These pointers represent function body and will be populated only when clone
     is materialized.  */
  gcc_assert (new_decl != old_decl);
647
  DECL_STRUCT_FUNCTION (new_decl) = NULL;
648 649 650
  DECL_ARGUMENTS (new_decl) = NULL;
  DECL_INITIAL (new_decl) = NULL;
  DECL_RESULT (new_decl) = NULL; 
651
  /* We cannot do DECL_RESULT (new_decl) = NULL; here because of LTO partitioning
652
     sometimes storing only clone decl instead of original.  */
653 654

  /* Generate a new name for the new version. */
655 656 657 658 659 660
  len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
  name = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
  strcpy (name + len + 1, suffix);
  name[len] = '.';
  DECL_NAME (new_decl) = get_identifier (name);
661 662
  SET_DECL_ASSEMBLER_NAME (new_decl,
			   clone_function_name (old_decl, suffix, num_suffix));
663 664
  SET_DECL_RTL (new_decl, NULL);

665
  new_node = create_clone (new_decl, count, false,
666
			   redirect_callers, false, NULL, args_to_skip, suffix);
Martin Liska committed
667

668 669 670 671 672
  /* Update the properties.
     Make clone visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
673
  set_new_clone_decl_and_node_flags (new_node);
674
  new_node->clone.tree_map = tree_map;
675
  if (!implicit_section)
676
    new_node->set_section (get_section ());
677 678 679 680 681 682 683

  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
684
    new_node->unique_name = true;
685
  FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
686
    new_node->maybe_create_reference (map->new_tree, NULL);
687

Martin Liska committed
688
  if (ipa_transforms_to_apply.exists ())
689
    new_node->ipa_transforms_to_apply
Martin Liska committed
690
      = ipa_transforms_to_apply.copy ();
691

Martin Liska committed
692
  symtab->call_cgraph_duplication_hooks (this, new_node);
693 694 695 696

  return new_node;
}

Martin Liska committed
697 698 699 700
/* callgraph node being removed from symbol table; see if its entry can be
   replaced by other inline clone.  */
cgraph_node *
cgraph_node::find_replacement (void)
701
{
Martin Liska committed
702
  cgraph_node *next_inline_clone, *replacement;
703

Martin Liska committed
704
  for (next_inline_clone = clones;
705
       next_inline_clone
Martin Liska committed
706
       && next_inline_clone->decl != decl;
707 708 709 710 711 712 713 714
       next_inline_clone = next_inline_clone->next_sibling_clone)
    ;

  /* If there is inline clone of the node being removed, we need
     to put it into the position of removed node and reorganize all
     other clones to be based on it.  */
  if (next_inline_clone)
    {
Martin Liska committed
715 716
      cgraph_node *n;
      cgraph_node *new_clones;
717 718 719 720 721 722 723 724 725

      replacement = next_inline_clone;

      /* Unlink inline clone from the list of clones of removed node.  */
      if (next_inline_clone->next_sibling_clone)
	next_inline_clone->next_sibling_clone->prev_sibling_clone
	  = next_inline_clone->prev_sibling_clone;
      if (next_inline_clone->prev_sibling_clone)
	{
Martin Liska committed
726
	  gcc_assert (clones != next_inline_clone);
727 728 729 730 731
	  next_inline_clone->prev_sibling_clone->next_sibling_clone
	    = next_inline_clone->next_sibling_clone;
	}
      else
	{
Martin Liska committed
732 733
	  gcc_assert (clones == next_inline_clone);
	  clones = next_inline_clone->next_sibling_clone;
734 735
	}

Martin Liska committed
736 737
      new_clones = clones;
      clones = NULL;
738 739

      /* Copy clone info.  */
Martin Liska committed
740
      next_inline_clone->clone = clone;
741 742

      /* Now place it into clone tree at same level at NODE.  */
Martin Liska committed
743
      next_inline_clone->clone_of = clone_of;
744 745
      next_inline_clone->prev_sibling_clone = NULL;
      next_inline_clone->next_sibling_clone = NULL;
Martin Liska committed
746
      if (clone_of)
747
	{
Martin Liska committed
748 749 750 751
	  if (clone_of->clones)
	    clone_of->clones->prev_sibling_clone = next_inline_clone;
	  next_inline_clone->next_sibling_clone = clone_of->clones;
	  clone_of->clones = next_inline_clone;
752 753 754 755 756 757 758 759 760 761 762
	}

      /* Merge the clone list.  */
      if (new_clones)
	{
	  if (!next_inline_clone->clones)
	    next_inline_clone->clones = new_clones;
	  else
	    {
	      n = next_inline_clone->clones;
	      while (n->next_sibling_clone)
Martin Liska committed
763
		n = n->next_sibling_clone;
764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782
	      n->next_sibling_clone = new_clones;
	      new_clones->prev_sibling_clone = n;
	    }
	}

      /* Update clone_of pointers.  */
      n = new_clones;
      while (n)
	{
	  n->clone_of = next_inline_clone;
	  n = n->next_sibling_clone;
	}
      return replacement;
    }
  else
    return NULL;
}

/* Like cgraph_set_call_stmt but walk the clone tree and update all
783 784 785 786
   clones sharing the same function body.  
   When WHOLE_SPECULATIVE_EDGES is true, all three components of
   speculative edge gets updated.  Otherwise we update only direct
   call.  */
787 788

void
789
cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
790
					     gcall *new_stmt,
Martin Liska committed
791
					     bool update_speculative)
792
{
Martin Liska committed
793 794
  cgraph_node *node;
  cgraph_edge *edge = get_edge (old_stmt);
795 796

  if (edge)
Martin Liska committed
797
    edge->set_call_stmt (new_stmt, update_speculative);
798

Martin Liska committed
799
  node = clones;
800
  if (node)
Martin Liska committed
801
    while (node != this)
802
      {
Martin Liska committed
803
	cgraph_edge *edge = node->get_edge (old_stmt);
804
	if (edge)
805
	  {
Martin Liska committed
806
	    edge->set_call_stmt (new_stmt, update_speculative);
807 808 809 810 811
	    /* If UPDATE_SPECULATIVE is false, it means that we are turning
	       speculative call into a real code sequence.  Update the
	       callgraph edges.  */
	    if (edge->speculative && !update_speculative)
	      {
Martin Liska committed
812 813
		cgraph_edge *direct, *indirect;
		ipa_ref *ref;
814 815

		gcc_assert (!edge->indirect_unknown_callee);
Martin Liska committed
816
		edge->speculative_call_info (direct, indirect, ref);
817 818 819 820 821
		direct->speculative = false;
		indirect->speculative = false;
		ref->speculative = false;
	      }
	  }
822 823 824 825 826 827
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
Martin Liska committed
828
	    while (node != this && !node->next_sibling_clone)
829
	      node = node->clone_of;
Martin Liska committed
830
	    if (node != this)
831 832 833 834 835 836 837 838 839 840 841 842 843
	      node = node->next_sibling_clone;
	  }
      }
}

/* Like cgraph_create_edge walk the clone tree and update all clones sharing
   same function body.  If clones already have edge for OLD_STMT; only
   update the edge same way as cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
   frequencies of the clones.  */

void
Martin Liska committed
844
cgraph_node::create_edge_including_clones (cgraph_node *callee,
845
					   gimple *old_stmt, gcall *stmt,
846
					   profile_count count,
Martin Liska committed
847
					   cgraph_inline_failed_t reason)
848
{
Martin Liska committed
849 850
  cgraph_node *node;
  cgraph_edge *edge;
851

Martin Liska committed
852
  if (!get_edge (stmt))
853
    {
854
      edge = create_edge (callee, stmt, count);
855 856 857
      edge->inline_failed = reason;
    }

Martin Liska committed
858
  node = clones;
859
  if (node)
Martin Liska committed
860
    while (node != this)
861 862 863 864 865 866 867 868 869 870 871 872 873
      /* Thunk clones do not get updated while copying inline function body.  */
      if (!node->thunk.thunk_p)
	{
	  cgraph_edge *edge = node->get_edge (old_stmt);

	  /* It is possible that clones already contain the edge while
	     master didn't.  Either we promoted indirect call into direct
	     call in the clone or we are processing clones of unreachable
	     master where edges has been removed.  */
	  if (edge)
	    edge->set_call_stmt (stmt);
	  else if (! node->get_edge (stmt))
	    {
874
	      edge = node->create_edge (callee, stmt, count);
875 876
	      edge->inline_failed = reason;
	    }
877

878 879 880 881 882 883 884 885 886 887 888 889
	  if (node->clones)
	    node = node->clones;
	  else if (node->next_sibling_clone)
	    node = node->next_sibling_clone;
	  else
	    {
	      while (node != this && !node->next_sibling_clone)
		node = node->clone_of;
	      if (node != this)
		node = node->next_sibling_clone;
	    }
	}
890 891 892 893 894 895 896 897
}

/* Remove the node from cgraph and all inline clones inlined into it.
   Skip however removal of FORBIDDEN_NODE and return true if it needs to be
   removed.  This allows to call the function from outer loop walking clone
   tree.  */

bool
Martin Liska committed
898
cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
899
{
Martin Liska committed
900
  cgraph_edge *e, *next;
901 902
  bool found = false;

Martin Liska committed
903
  if (this == forbidden_node)
904
    {
Martin Liska committed
905
      callers->remove ();
906 907
      return true;
    }
Martin Liska committed
908
  for (e = callees; e; e = next)
909 910 911
    {
      next = e->next_callee;
      if (!e->inline_failed)
Martin Liska committed
912
	found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
913
    }
Martin Liska committed
914
  remove ();
915 916 917 918 919 920 921 922
  return found;
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (), now the call_expr in their
   respective tree code should be updated to call the NEW_VERSION.  */

static void
Martin Liska committed
923
update_call_expr (cgraph_node *new_version)
924
{
Martin Liska committed
925
  cgraph_edge *e;
926 927 928 929 930 931

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
Martin Liska committed
932
      function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
933
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
934 935 936 937 938 939
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}


/* Create a new cgraph node which is the new version of
Martin Liska committed
940
   callgraph node.  REDIRECT_CALLERS holds the callers
941
   edges which should be redirected to point to
Martin Liska committed
942
   NEW_VERSION.  ALL the callees edges of the node
943 944 945 946 947 948 949
   are cloned to the new version node.  Return the new
   version node. 

   If non-NULL BLOCK_TO_COPY determine what basic blocks 
   was copied to prevent duplications of calls that are dead
   in the clone.  */

Martin Liska committed
950 951 952
cgraph_node *
cgraph_node::create_version_clone (tree new_decl,
				  vec<cgraph_edge *> redirect_callers,
953 954
				  bitmap bbs_to_copy,
				  const char *suffix)
955
 {
Martin Liska committed
956 957
   cgraph_node *new_version;
   cgraph_edge *e;
958 959
   unsigned i;

Martin Liska committed
960
   new_version = cgraph_node::create (new_decl);
961

Martin Liska committed
962 963 964
   new_version->analyzed = analyzed;
   new_version->definition = definition;
   new_version->local = local;
965
   new_version->externally_visible = false;
966
   new_version->no_reorder = no_reorder;
967
   new_version->local.local = new_version->definition;
Martin Liska committed
968 969 970
   new_version->global = global;
   new_version->rtl = rtl;
   new_version->count = count;
971

Martin Liska committed
972
   for (e = callees; e; e=e->next_callee)
973 974
     if (!bbs_to_copy
	 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
Martin Liska committed
975
       e->clone (new_version, e->call_stmt,
976
		 e->lto_stmt_uid, count, count,
Martin Liska committed
977
		 true);
Martin Liska committed
978
   for (e = indirect_calls; e; e=e->next_callee)
979 980
     if (!bbs_to_copy
	 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
Martin Liska committed
981
       e->clone (new_version, e->call_stmt,
982
		 e->lto_stmt_uid, count, count,
Martin Liska committed
983
		 true);
984
   FOR_EACH_VEC_ELT (redirect_callers, i, e)
985 986 987
     {
       /* Redirect calls to the old version node to point to its new
	  version.  */
Martin Liska committed
988
       e->redirect_callee (new_version);
989 990
     }

Martin Liska committed
991
   symtab->call_cgraph_duplication_hooks (this, new_version);
992

993 994
   dump_callgraph_transformation (this, new_version, suffix);

995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 1009 1010 1011 1012 1013 1014
   return new_version;
 }

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).

   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
   If non_NULL NEW_ENTRY determine new entry BB of the clone.

1015 1016 1017 1018 1019
   If TARGET_ATTRIBUTES is non-null, when creating a new declaration,
   add the attributes to DECL_ATTRIBUTES.  And call valid_attribute_p
   that will promote value of the attribute DECL_FUNCTION_SPECIFIC_TARGET
   of the declaration.

1020 1021
   Return the new version's cgraph node.  */

Martin Liska committed
1022 1023 1024 1025 1026
cgraph_node *
cgraph_node::create_version_clone_with_body
  (vec<cgraph_edge *> redirect_callers,
   vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
   bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
1027
   const char *suffix, tree target_attributes)
1028
{
Martin Liska committed
1029
  tree old_decl = decl;
Martin Liska committed
1030
  cgraph_node *new_version_node = NULL;
1031 1032 1033 1034 1035
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

Martin Liska committed
1036
  gcc_assert (local.can_change_signature || !args_to_skip);
1037 1038 1039 1040 1041 1042 1043 1044 1045

  /* Make a new FUNCTION_DECL tree node for the new version. */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version. */
1046
  DECL_NAME (new_decl) = clone_function_name_numbered (old_decl, suffix);
1047 1048 1049
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

1050 1051
  DECL_VIRTUAL_P (new_decl) = 0;

1052 1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064
  if (target_attributes)
    {
      DECL_ATTRIBUTES (new_decl) = target_attributes;

      location_t saved_loc = input_location;
      tree v = TREE_VALUE (target_attributes);
      input_location = DECL_SOURCE_LOCATION (new_decl);
      bool r = targetm.target_option.valid_attribute_p (new_decl, NULL, v, 0);
      input_location = saved_loc;
      if (!r)
	return NULL;
    }

1065
  /* When the old decl was a con-/destructor make sure the clone isn't.  */
1066 1067
  DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_decl) = 0;
1068 1069 1070

  /* Create the new version's call-graph node.
     and update the edges of the new node. */
Martin Liska committed
1071
  new_version_node = create_version_clone (new_decl, redirect_callers,
1072
					  bbs_to_copy, suffix);
1073

Martin Liska committed
1074
  if (ipa_transforms_to_apply.exists ())
1075
    new_version_node->ipa_transforms_to_apply
Martin Liska committed
1076
      = ipa_transforms_to_apply.copy ();
1077 1078 1079 1080 1081 1082 1083 1084 1085
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
Martin Liska committed
1086
  new_version_node->make_decl_local ();
1087 1088
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->externally_visible = 0;
1089 1090
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
1091 1092
  if (!implicit_section)
    new_version_node->set_section (get_section ());
1093 1094 1095 1096 1097 1098
  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
1099
    new_version_node->unique_name = true;
1100 1101 1102 1103

  /* Update the call_expr on the edges to call the new version node. */
  update_call_expr (new_version_node);

1104
  symtab->call_cgraph_insertion_hooks (new_version_node);
1105 1106 1107 1108 1109 1110
  return new_version_node;
}

/* Given virtual clone, turn it into actual clone.  */

static void
Martin Liska committed
1111
cgraph_materialize_clone (cgraph_node *node)
1112 1113
{
  bitmap_obstack_initialize (NULL);
1114
  node->former_clone_of = node->clone_of->decl;
1115 1116 1117
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
1118
  tree_function_versioning (node->clone_of->decl, node->decl,
1119 1120 1121
  			    node->clone.tree_map, true,
			    node->clone.args_to_skip, false,
			    NULL, NULL);
Martin Liska committed
1122
  if (symtab->dump_file)
1123
    {
Martin Liska committed
1124 1125 1126
      dump_function_to_file (node->clone_of->decl, symtab->dump_file,
			     dump_flags);
      dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
1127 1128 1129 1130 1131 1132 1133 1134 1135 1136 1137
    }

  /* Function is no longer clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
1138
  if (!node->clone_of->analyzed && !node->clone_of->clones)
1139
    {
Martin Liska committed
1140 1141
      node->clone_of->release_body ();
      node->clone_of->remove_callees ();
Martin Liska committed
1142
      node->clone_of->remove_all_references ();
1143 1144 1145 1146 1147 1148 1149 1150
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}

/* Once all functions from compilation unit are in memory, produce all clones
   and update all calls.  We might also do this on demand if we don't want to
   bring all functions to memory prior compilation, but current WHOPR
1151
   implementation does that and it is a bit easier to keep everything right in
1152 1153 1154
   this order.  */

void
Martin Liska committed
1155
symbol_table::materialize_all_clones (void)
1156
{
Martin Liska committed
1157
  cgraph_node *node;
1158
  bool stabilized = false;
1159
  
1160

Martin Liska committed
1161 1162
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Materializing clones\n");
1163 1164

  cgraph_node::checking_verify_cgraph_nodes ();
1165 1166 1167 1168 1169 1170 1171 1172 1173

  /* We can also do topological order, but number of iterations should be
     bounded by number of IPA passes since single IPA pass is probably not
     going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      FOR_EACH_FUNCTION (node)
        {
1174 1175
	  if (node->clone_of && node->decl != node->clone_of->decl
	      && !gimple_has_body_p (node->decl))
1176
	    {
1177
	      if (!node->clone_of->clone_of)
1178
		node->clone_of->get_untransformed_body ();
1179
	      if (gimple_has_body_p (node->clone_of->decl))
1180
	        {
Martin Liska committed
1181
		  if (symtab->dump_file)
1182
		    {
Martin Liska committed
1183
		      fprintf (symtab->dump_file, "cloning %s to %s\n",
1184 1185
			       xstrdup_for_dump (node->clone_of->name ()),
			       xstrdup_for_dump (node->name ()));
1186 1187 1188
		      if (node->clone.tree_map)
		        {
			  unsigned int i;
Martin Liska committed
1189
			  fprintf (symtab->dump_file, "   replace map: ");
1190 1191 1192
			  for (i = 0;
			       i < vec_safe_length (node->clone.tree_map);
			       i++)
1193
			    {
Martin Liska committed
1194
			      ipa_replace_map *replace_info;
1195
			      replace_info = (*node->clone.tree_map)[i];
1196 1197
			      print_generic_expr (symtab->dump_file,
						  replace_info->old_tree);
Martin Liska committed
1198
			      fprintf (symtab->dump_file, " -> ");
1199 1200
			      print_generic_expr (symtab->dump_file,
						  replace_info->new_tree);
Martin Liska committed
1201
			      fprintf (symtab->dump_file, "%s%s;",
1202 1203 1204
			      	       replace_info->replace_p ? "(replace)":"",
				       replace_info->ref_p ? "(ref)":"");
			    }
Martin Liska committed
1205
			  fprintf (symtab->dump_file, "\n");
1206 1207 1208
			}
		      if (node->clone.args_to_skip)
			{
Martin Liska committed
1209 1210 1211
			  fprintf (symtab->dump_file, "   args_to_skip: ");
			  dump_bitmap (symtab->dump_file,
				       node->clone.args_to_skip);
1212 1213 1214
			}
		      if (node->clone.args_to_skip)
			{
Martin Liska committed
1215 1216
			  fprintf (symtab->dump_file, "   combined_args_to_skip:");
			  dump_bitmap (symtab->dump_file, node->clone.combined_args_to_skip);
1217 1218 1219 1220 1221 1222 1223 1224 1225
			}
		    }
		  cgraph_materialize_clone (node);
		  stabilized = false;
	        }
	    }
	}
    }
  FOR_EACH_FUNCTION (node)
1226
    if (!node->analyzed && node->callees)
1227
      {
Martin Liska committed
1228
	node->remove_callees ();
Martin Liska committed
1229
	node->remove_all_references ();
1230 1231
      }
    else
Martin Liska committed
1232
      node->clear_stmts_in_references ();
Martin Liska committed
1233 1234
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Materialization Call site updates done.\n");
1235 1236 1237

  cgraph_node::checking_verify_cgraph_nodes ();

1238
  symtab->remove_unreachable_nodes (symtab->dump_file);
1239 1240 1241
}

#include "gt-cgraphclones.h"