/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "cgraph.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
#include "dbgcnt.h"

/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Return true when NODE can be target of an indirect call.  */

static bool
is_indirect_call_target_p (struct cgraph_node *node, void *)
{
  return node->indirect_call_target;
}

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when
   we find their bodies needed for further cloning.  In the second case we
   mark them by pointer to 2 after processing so they are re-queued when
   they become reachable.  */
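
/* Concretely, the AUX encoding used by this worklist is:
     NULL	   - node has never been enqueued
     (void *) 2	   - node was already processed as part of the boundary
     anything else - node is queued; AUX points to the next queue element,
		     with (symtab_node *) 1 terminating the list.  */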

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}

/* Process the references of SNODE: enqueue every referred symbol and mark
   those whose definitions have to be preserved as reachable.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && ((TREE_CODE (node->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
		        || (symtab->state < IPA_SSA
		            && lookup_attribute
				 ("always_inline",
			          DECL_ATTRIBUTES (body->decl))))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->decl)
		         != error_mark_node))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try trivial devirtualization when no target or only one target is
   possible.  */
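
/* For instance, when the call's static type is final and analysis finds
   exactly one implementation, the code at the end of this function makes
   the edge direct to it; when no target is possible at all, the call is
   redirected to __builtin_unreachable.  */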

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		    (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior to inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && opt_for_fn (body->decl, optimize)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	     {
		/* Be sure that we will not optimize out alias target
		   body.  */
		if (DECL_EXTERNAL (n->decl)
		    && n->alias
		    && before_inlining_p)
		  reachable->add (body);
	       reachable->add (n);
	     }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce a direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
            {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                               "devirtualizing call in %s/%i to %s/%i\n",
                               edge->caller->name (), edge->caller->order,
                               target->name (),
                               target->order);
	    }
	  edge = edge->make_direct (target);
	  if (inline_summaries)
	    inline_update_overall_summary (node);
	  else if (edge->call_stmt)
	    {
	      edge->redirect_call_stmt_to_callee ();

	      /* Call to __builtin_unreachable shouldn't be instrumented.  */
	      if (!targets.length ())
		gimple_call_set_with_bounds (edge->call_stmt, false);
	    }
	}
    }
}

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies
     need to stay in memory until inlining in hope that they will be
     inlined.  After inlining we release their bodies and turn them into
     unanalyzed nodes even when they are reachable.

   - virtual functions are kept in callgraph even if they seem unreachable
     in hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is
     unreachable otherwise.  To avoid origin from sitting in the callgraph
     and being walked by IPA passes, we turn them into unanalyzed nodes with
     body defined.

     We maintain the set of function declarations where bodies need to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of origin and cgraph_remove_node already knows how to
     reshape callgraph and preserve body when offline copy of function or
     inline clone is being removed.

   - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by
     them be removed when it is otherwise unreachable.

   We maintain queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list by AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in boundary we
   always turn definition into a declaration, but we may keep function body
   around based on body_needed_for_clonning.

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
  bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
					    : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
	gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
	gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *)2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || (before_inlining_p
			      && (opt_for_fn (body->decl, optimize)
		                  || (symtab->state < IPA_SSA
		                      && lookup_attribute
				          ("always_inline",
				           DECL_ATTRIBUTES (body->decl)))))))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (cnode->global.inlined_to)
	        body_needed_for_clonning.add (cnode->decl);

	      /* For instrumentation clones we always need original
		 function node for proper LTO privatization.  */
	      if (cnode->instrumentation_clone
		  && cnode->definition)
		{
		  gcc_assert (cnode->instrumented_version || in_lto_p);
		  if (cnode->instrumented_version)
		    {
		      enqueue_node (cnode->instrumented_version, &first,
				    &reachable);
		      reachable.add (cnode->instrumented_version);
		    }
		}

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk.thunk_p)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	boundary.  This will also hold initializers of the external vars NODE
	refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s/%i", node->name (), node->order);
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk.thunk_p)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl)
	      && !node->alias && !node->thunk.thunk_p)
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk.thunk_p)
	    {
	      if (file)
		fprintf (file, " %s/%i", node->name (), node->order);
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->transparent_alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	      if (node->thunk.thunk_p
		  && node->thunk.add_pointer_bounds_args)
		{
		  node->thunk.thunk_p = false;
		  node->thunk.add_pointer_bounds_args = false;
		}
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly lookup
	     the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s/%i", ref->referred->name (),
			 ref->referred->order);
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s/%i", vnode->name (), vnode->order);
          vnext = next_variable (vnode);
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
	      && !POINTER_BOUNDS_P (vnode->decl))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	    (has_addr_references_p, NULL, true)
	    && (!node->instrumentation_clone
		|| !node->instrumented_version
		|| !node->instrumented_version->address_taken))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ()
		/* Virtual functions may be kept in cgraph just because
		   of possible later devirtualization.  Do not mark them as
		   local too early so we won't optimize them out before
		   we are done with polymorphic call analysis.  */
		&& (!before_inlining_p
		    || !node->call_for_symbol_and_aliases
		       (is_indirect_call_target_p, NULL, true)))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      case IPA_REF_CHKP:
	gcc_unreachable ();
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	{
	  if (vnode->num_references ())
	    *(bool *)data = true;
	  vnode->remove_all_references ();
	}
    }
  return false;
}

/* Clear addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken or that are
   read only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we
   do this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */
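
/* For example, a file-local variable that is only ever loaded from ends up
   TREE_READONLY here, while one that is only ever stored to is marked
   writeonly and loses its initializer and references; the latter can make
   other symbols unreachable, which is why callers re-run symbol removal
   when this function returns true.  */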

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
					        true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
					        true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}

/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make function unreachable.  We cannot
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
  /* Produce sane name but one not recognizable by collect2, just for the
     case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
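  /* For example, WHICH 'I' and PRIORITY 99 yield "I_00099_0" on the first
     call; get_file_function_name below embeds this into the file-unique
     name that collect2 scans for.  */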
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */
985 986 987 988 989 990

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
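
/* A minimal usage sketch (hypothetical caller; real users include passes
   like asan that emit module constructors):

     tree body = NULL_TREE;
     append_to_statement_list (build_call_expr (some_fn_decl, 0), &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   where some_fn_decl is assumed to be a FUNCTION_DECL taking no
   arguments.  */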

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When target does not have ctors and dtors, we call all constructors
   and destructors by a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructor/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
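
/* For example, on a target without native ctor/dtor support, three
   recorded constructors with priorities 65, 65 and 100 (already sorted
   by compare_ctor below) yield two generated 'I' functions: one calling
   both priority-65 constructors and one calling the priority-100
   constructor.  */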

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backward
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform
   the transformation also at linktime to merge possibly numerous
   constructors/destructors into a single function to improve code locality
   and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
       record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with the single function that may
   access them.

   FUNCTION is the current single user of a variable; VAR is another
   variable whose lattice value is merged in.  The lattice is stored in
   SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */
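
/* Example lattice transitions: TOP meet F = F; F meet F = F;
   F meet G = BOTTOM for distinct functions F and G; and BOTTOM meet
   anything stays BOTTOM.  */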

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are all used by the single
   function FUNCTION.
   SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}

/* Pass setting used_by_single_function flag.
1303 1304
   This flag is set on variable when there is only one function that may
   possibly referr to it.  */
1305 1306 1307 1308 1309 1310

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
Trevor Saunders committed
1311
  hash_map<varpool_node *, cgraph_node *> single_user_map;
1312 1313

  FOR_EACH_DEFINED_VARIABLE (var)
Martin Liska committed
1314
    if (!var->all_refs_explicit_p ())
1315 1316 1317 1318 1319 1320 1321 1322 1323 1324 1325 1326 1327 1328 1329 1330 1331
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
        var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
	        && ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_checking_assert (single_user_map.get (var));

	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s/%i is used by single function\n",
		       var->name (), var->order);
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}