/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "calls.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "hash-map.h"
#include "pointer-set.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "flags.h"
#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "profile.h"
#include "params.h"
#include "internal-fn.h"
#include "tree-ssa-alias.h"
#include "gimple.h"
#include "dbgcnt.h"

/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued once they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      struct pointer_set_t *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !pointer_set_contains (reachable, node))
    return;
  node->aux = *first;
  *first = node;
}
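
/* For illustration, the AUX states used by this worklist are:
     aux == 0      symbol was never enqueued;
     aux == link   symbol is in the queue (pointer to the next entry,
		   or 1 at the tail);
     aux == 2      symbol was already processed as part of the boundary.
   A symbol marked 2 is re-enqueued above only once the REACHABLE set
   proves it is more than a boundary symbol.  */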

/* Process references.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    struct pointer_set_t *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && (cgraph_state < CGRAPH_STATE_IPA_SSA
		        || !lookup_attribute ("always_inline",
					      DECL_ATTRIBUTES (node->decl)))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->decl)
		         != error_mark_node))))
	pointer_set_insert (reachable, node);
      enqueue_node (node, first, reachable);
    }
}
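
/* E.g. a reference to a variable or function defined in this unit inserts
   it into REACHABLE, while a reference to a symbol with no local
   definition only lands it in the boundary (it is enqueued above but not
   inserted into REACHABLE).  */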

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       pointer_set_t *reachable, bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  if (!pointer_set_insert (reachable_call_targets,
			   cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		    (method_class_type (TREE_TYPE (n->decl))))
	    continue;

	  /* Prior to inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && (cgraph_state < CGRAPH_STATE_IPA_SSA
		      || !lookup_attribute ("always_inline",
					    DECL_ATTRIBUTES (n->decl)))))
	    pointer_set_insert (reachable, n);

	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_get_create_node
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s/%i to %s/%i\n",
			       edge->caller->name (), edge->caller->order,
			       target->name (),
			       target->order);
	    }
	  edge = cgraph_make_edge_direct (edge, target);
	  if (inline_summary_vec)
	    inline_update_overall_summary (node);
	  else if (edge->call_stmt)
	    cgraph_redirect_edge_call_stmt_to_callee (edge);
	}
    }
}
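
/* For example, when the analysis above proves that the only possible
   target of the virtual call is A::foo, the indirect edge is turned into
   a direct call to A::foo; when the target set is provably empty, the
   call is redirected to __builtin_unreachable instead.  */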

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies
     need to stay in memory until inlining in the hope that they will be
     inlined.  After inlining we release their bodies and turn them into
     unanalyzed nodes even when they are reachable.

     BEFORE_INLINING_P specifies whether we are before or after inlining.

   - virtual functions are kept in the callgraph even if they seem
     unreachable, in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is otherwise
     unreachable.  To avoid the origin sitting in the callgraph and being
     walked by IPA passes, we turn it into an unanalyzed node with the body
     defined.

     We maintain the set of function declarations whose bodies need to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin, and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining, however, we
     stop walking their references to let everything static referenced by
     them be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list via AUX pointers terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the
   function body around based on body_needed_for_clonning.

   All symbols that enter the queue have a non-zero AUX pointer and are in
   the boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose we
   set the AUX pointer of processed symbols in the boundary to constant 2.  */
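
/* As a small example of the boundary rules above: if a reachable a ()
   calls an extern inline b (), then before inlining b ()'s body is kept
   in the hope it will be inlined; after inlining b () survives only as a
   declaration in the boundary, and anything referenced solely from its
   body becomes eligible for reclamation.  */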

bool
symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  struct pointer_set_t *reachable = pointer_set_create ();
  struct pointer_set_t *body_needed_for_clonning = pointer_set_create ();
  struct pointer_set_t *reachable_call_targets = pointer_set_create ();

  timevar_push (TV_IPA_UNREACHABLE);
  if (optimize && flag_devirtualize)
    build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !cgraph_can_remove_if_no_direct_calls_and_refs_p (node))
	{
	  gcc_assert (!node->global.inlined_to);
	  pointer_set_insert (reachable, node);
	  enqueue_node (node, &first, reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!varpool_can_remove_if_no_refs (vnode)
	&& !vnode->in_other_partition)
      {
	pointer_set_insert (reachable, vnode);
	enqueue_node (vnode, &first, reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !pointer_set_contains (reachable, first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
	 for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	node->aux = (void *)2;
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_get_create_node (DECL_ABSTRACT_ORIGIN (node->decl));
	      origin_node->used_as_abstract_origin = true;
	      enqueue_node (origin_node, &first, reachable);
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!symtab_comdat_local_p (next)
		    && !pointer_set_insert (reachable, next))
		  enqueue_node (next, &first, reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (optimize && flag_devirtualize)
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (reachable_call_targets,
						       e, &first, reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || before_inlining_p))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			{
			  pointer_set_insert (reachable,
					      cgraph_function_node (e->callee));
			}
		      pointer_set_insert (reachable, e->callee);
		    }
		  enqueue_node (e->callee, &first, reachable);
		}

	      /* When an inline clone exists, mark the body to be preserved
		 so that when removing the offline copy of the function we
		 don't kill it.  */
	      if (cnode->global.inlined_to)
	        pointer_set_insert (body_needed_for_clonning, cnode->decl);

	      /* For non-inline clones, force their origins to the boundary
		 and ensure that the body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      pointer_set_insert (body_needed_for_clonning, cnode->decl);
		      enqueue_node (cnode, &first, reachable);
		    }
		}

	    }
	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !pointer_set_insert (reachable, next))
		  enqueue_node (next, &first, reachable);
	    }
	}
      /* When we see the constructor of an external variable, keep referred
	 nodes in the boundary.  This will also hold initializers of the
	 external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = cgraph_first_function (); node; node = next)
    {
      next = cgraph_next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s/%i", node->name (), node->order);
	  cgraph_remove_node (node);
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!pointer_set_contains (reachable, node))
        {
	  if (!pointer_set_contains (body_needed_for_clonning, node->decl))
	    cgraph_release_function_body (node);
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition)
	    {
	      if (file)
		fprintf (file, " %s/%i", node->name (), node->order);
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      cgraph_node_remove_callees (node);
	      symtab_remove_from_same_comdat_group (node);
	      node->remove_all_references ();
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !cgraph_function_with_gimple_body_p (node)
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = varpool_first_variable (); vnode; vnode = vnext)
    {
      vnext = varpool_next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  if (file)
	    fprintf (file, " %s/%i", vnode->name (), vnode->order);
	  varpool_remove_node (vnode);
	  changed = true;
	}
      else if (!pointer_set_contains (reachable, vnode))
        {
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  symtab_remove_from_same_comdat_group (vnode);

	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node)
	    varpool_remove_initializer (vnode);
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  pointer_set_destroy (reachable);
  pointer_set_destroy (body_needed_for_clonning);
  pointer_set_destroy (reachable_call_targets);

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

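/* Illustrative use of the pass above (the real call sites live elsewhere
   in the cgraph and inliner code): it is invoked with BEFORE_INLINING_P
   true before inlining and false afterwards, e.g.

     changed = symtab_remove_unreachable_nodes (true, dump_file);

   and the boolean result tells the caller whether any symbol was
   reclaimed.  */
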
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed; also clear EXPLICIT_REFS if the references to the given
   variable do not need to be explicit.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!varpool_all_refs_explicit_p (vnode)
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (varpool (ref->referring), written, address_taken,
			    read, explicit_refs);
	break;
      }
}
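
/* For instance, a file-scope variable that is only ever stored to leaves
   this with WRITTEN set and READ/ADDRESS_TAKEN clear, which is what lets
   ipa_discover_readonly_nonaddressable_vars below mark it write-only.  */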

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	vnode->remove_all_references ();
    }
  return false;
}

/* Clear addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken or that are
   read-only and update their flags.

   FIXME: This cannot be done between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.  Currently we
   do this transformation as part of whole program visibility and re-do it at
   the ipa-reference pass (to take cloning into account), but it would make
   sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read, &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    varpool_for_node_and_aliases (vnode, clear_addressable_bit, NULL, true);
	  }
	if (!address_taken && !written
	    /* Making a variable in an explicit section read-only can cause
	       a section type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    varpool_for_node_and_aliases (vnode, set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    varpool_for_node_and_aliases (vnode, set_writeonly_bit, NULL, true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
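
/* Example of the effect: given

     static int counter;            (only ever stored to)
     static const char *msg = "x";  (never written, address not taken)

   the pass above marks counter write-only, dropping its initializer when
   optimizing, and marks msg read-only, assuming neither variable is
   placed in an explicit section.  */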

/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_inline_summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
  /* Produce a sane name but one not recognizable by collect2, just for the
     case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
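
/* For a final constructor of priority 100 the buffer holds "I_00100_0",
   which get_file_function_name decorates into a collect2-recognizable
   global name (roughly "_GLOBAL__I_00100_0_..."); non-final bodies get
   the "sub_I_..." prefix so collect2 ignores them.  The decorated shape
   shown is illustrative and target-dependent.  */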

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When the target does not have ctors and dtors, we call all constructors
   and destructors from a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_get_node (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
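
/* Illustration: with sorted ctor priorities {200, 65535, 65535}, the loop
   above emits one 'I' function calling both priority-65535 ctors, while
   the lone priority-200 ctor is left untouched when the target has native
   ctor/dtor support and gets its own 'I' function otherwise.  */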

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backward
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into a single function to improve code locality and
   reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
       record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with the single function that may
   access them.

   FUNCTION is the current single user of a variable; VAR is a related
   variable whose lattice value is combined with it.
   The lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
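
/* A short worked example of the lattice: if SINGLE_USER_MAP already
   records F as the single user of VAR, meeting that with an unknown
   FUNCTION yields F, meeting with the same F keeps F, and meeting with
   any other function G falls to BOTTOM, i.e. "more than one user".  */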

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are all made by the single
   function FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, varpool_alias_target (vnode), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0;
       vnode->iterate_referring (i, ref)
       && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
        function = meet (function, dyn_cast <varpool_node *> (ref->referring), single_user_map);
    }
  return function;
}
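
/* For instance, a reference from an inline clone counts as a reference
   from the function it was inlined into (global.inlined_to), so inlining
   does not spuriously create a second user.  */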

/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!varpool_all_refs_explicit_p (var))
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
        var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0;
	       var->iterate_referring (i, ref); i++)
	    if (ref->use == IPA_REF_ALIAS
		&& !ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0;
	       var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
	        && ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
#ifdef ENABLE_CHECKING
	  gcc_assert (single_user_map.get (var));
#endif
	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s/%i is used by single function\n",
		       var->name (), var->order);
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}
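
/* E.g. a static variable referenced from exactly one function - counting
   references from its inline clones as references from the inlined_to
   function - ends up with used_by_single_function set; a reference from
   any second function drives its lattice value to BOTTOM and the flag
   stays clear.  */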

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}