/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "cgraph.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "dbgcnt.h"
#include "debug.h"
#include "attribs.h"

/* Return true when NODE has an ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Return true when NODE can be a target of an indirect call.  */

static bool
is_indirect_call_target_p (struct cgraph_node *node, void *)
{
  return node->indirect_call_target;
}
/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued when they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}

/* Process references.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && (TREE_CODE (node->decl) != FUNCTION_DECL
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
		        || (symtab->state < IPA_SSA
		            && lookup_attribute
				 ("always_inline",
			          DECL_ATTRIBUTES (body->decl))))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (VAR_P (node->decl)
		      && flag_wpa
		      && ctor_for_folding (node->decl)
		         != error_mark_node))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		    (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior to inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && opt_for_fn (body->decl, optimize)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	     {
		/* Be sure that we will not optimize out alias target
		   body.  */
		if (DECL_EXTERNAL (n->decl)
		    && n->alias
		    && before_inlining_p)
		  reachable->add (body);
	       reachable->add (n);
	     }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
            {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }
	  edge = edge->make_direct (target);
	  if (ipa_fn_summaries)
	    ipa_update_overall_fn_summary (node);
	  else if (edge->call_stmt)
	    {
	      edge->redirect_call_stmt_to_callee ();

	      /* Call to __builtin_unreachable shouldn't be instrumented.  */
	      if (!targets.length ())
		gimple_call_set_with_bounds (edge->call_stmt, false);
	    }
	}
    }
}

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To keep the origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with the body
     defined.

     We maintain a set of function declarations where the body needs to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list by the AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
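  /* Inlining is still ahead of us while the state is below IPA_SSA (when
     not optimizing and not in LTO) or below IPA_SSA_AFTER_INLINING
     otherwise; until then, bodies of extern inline functions and of
     possible devirtualization targets are kept alive.  */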
  bool before_inlining_p = symtab->state < (!optimize && !in_lto_p ? IPA_SSA
					    : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
	gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
	gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
	 for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *)2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
	          origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || (before_inlining_p
			      && (opt_for_fn (body->decl, optimize)
		                  || (symtab->state < IPA_SSA
		                      && lookup_attribute
				          ("always_inline",
				           DECL_ATTRIBUTES (body->decl)))))))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When an inline clone exists, mark its body to be preserved
		 so that when removing the offline copy of the function we
		 don't kill it.  */
	      if (cnode->global.inlined_to)
	        body_needed_for_clonning.add (cnode->decl);

	      /* For instrumentation clones we always need original
		 function node for proper LTO privatization.  */
	      if (cnode->instrumentation_clone
		  && cnode->definition)
		{
		  gcc_assert (cnode->instrumented_version || in_lto_p);
		  if (cnode->instrumented_version)
		    {
		      enqueue_node (cnode->instrumented_version, &first,
				    &reachable);
		      reachable.add (cnode->instrumented_version);
		    }
		}

	      /* For non-inline clones, force their origins to the boundary
		 and ensure that the body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk.thunk_p)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see a constructor of an external variable, keep referred
	 nodes in the boundary.  This will also hold initializers of the
	 external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s", node->dump_name ());
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk.thunk_p)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl)
	      && !node->alias && !node->thunk.thunk_p)
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk.thunk_p)
	    {
	      if (file)
		fprintf (file, " %s", node->dump_name ());
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->transparent_alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	      if (node->thunk.thunk_p
		  && node->thunk.add_pointer_bounds_args)
		{
		  node->thunk.thunk_p = false;
		  node->thunk.add_pointer_bounds_args = false;
		}
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so that materializing them allows
     further cloning.  If the function the clone is inlined into is removed,
     we need to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly lookup
	     the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s", ref->referred->dump_name ());
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s", vnode->dump_name ());
          vnext = next_variable (vnode);
	  /* Signal removal to the debug machinery.  */
	  if (! flag_wpa)
	    {
	      vnode->definition = false;
	      (*debug_hooks->late_global_decl) (vnode->decl);
	    }
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
	      && !POINTER_BOUNDS_P (vnode->decl))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	    (has_addr_references_p, NULL, true)
	    && (!node->instrumentation_clone
		|| !node->instrumented_version
		|| !node->instrumented_version->address_taken))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ()
		/* Virtual functions may be kept in cgraph just because
		   of possible later devirtualization.  Do not mark them as
		   local too early so we won't optimize them out before
		   we are done with polymorphic call analysis.  */
		&& (!before_inlining_p
		    || !node->call_for_symbol_and_aliases
		       (is_indirect_call_target_p, NULL, true)))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to the given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      case IPA_REF_CHKP:
	gcc_unreachable ();
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize || in_lto_p)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	{
	  if (vnode->num_references ())
	    *(bool *)data = true;
	  vnode->remove_all_references ();
	}
    }
  return false;
}

/* Clear the addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken or that are
   read only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since
   the readonly flag plays a role in what is shared and what is not.
   Currently we do this transformation as part of whole program visibility
   and re-do it at the ipa-reference pass (to take cloning into account),
   but it would make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
					        true);
	  }
	if (!address_taken && !written
	    /* Making a variable in an explicit section read-only can cause
	       a section type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c.  */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
					        true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
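  /* For example, a priority-65 constructor is named "I_00065_0" when FINAL
     is set, and "sub_I_00065_0" otherwise.  */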
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, just for
       the case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* When the target does not support ctors and dtors, we call all
   constructors and destructors from a special initialization/destruction
   function recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    ctors->safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    dtors->safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS.
   The CDTORS are sorted by initialization priority.  If CTOR_P is
   true, these are constructors; otherwise, they are destructors.  */

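/* For instance, with CDTORS sorted by priority as {65, 65, 100}, this emits
   one wrapper calling both priority-65 constructors and, unless the target
   natively supports constructors, another wrapper for the single
   priority-100 constructor.  */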
static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
	 nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Append a call to each constructor/destructor in the current batch
	 (all of the same initialization priority) to BODY.  */
      for (; i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backwards
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}

/* Look for constructors and destructors and produce a function calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into a single function to improve code locality and
   reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node, &ctors, &dtors);
  build_cdtor_fns (&ctors, &dtors);
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)
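/* The constant 2 is presumably chosen so that BOTTOM cannot collide with
   the worklist terminator (void *) 1, which is kept in the same AUX field
   during propagation below.  */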

/* Meet operation for single user dataflow.
   Here we want to associate variables with a single function that may access
   them.

   FUNCTION is the current single user of a variable, VAR is a variable that
   uses it.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

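/* For example, a variable referenced only from function FOO keeps FOO as
   its lattice value; at the first reference from some second function the
   meet operation drops the value to BOTTOM.  */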
cgraph_node *
meet (cgraph_node *function, varpool_node *var,
       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are all made by a single function
   FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}

/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
        var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
	        && ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_checking_assert (single_user_map.get (var));

	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s is used by single function\n",
		       var->dump_name ());
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}

/* Materialize all clones.  */

namespace {

const pass_data pass_data_materialize_all_clones =
{
  SIMPLE_IPA_PASS, /* type */
  "materialize-all-clones", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_materialize_all_clones : public simple_ipa_opt_pass
{
public:
  pass_materialize_all_clones (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      symtab->materialize_all_clones ();
      return 0;
    }

}; // class pass_materialize_all_clones

} // anon namespace

simple_ipa_opt_pass *
make_pass_materialize_all_clones (gcc::context *ctxt)
{
  return new pass_materialize_all_clones (ctxt);
}