tree-ssa-structalias.c 196 KB
Newer Older
1
/* Tree based points-to analysis
2 3
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
4 5
   Contributed by Daniel Berlin <dberlin@dberlin.org>

6
   This file is part of GCC.
7

8 9 10 11
   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.
12

13 14 15 16
   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.
17

18 19 20
   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
21 22 23 24 25 26 27 28 29 30 31 32 33 34

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "obstack.h"
#include "bitmap.h"
#include "flags.h"
#include "basic-block.h"
#include "output.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
35
#include "diagnostic-core.h"
36
#include "gimple.h"
37 38 39 40 41 42 43
#include "hashtab.h"
#include "function.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "timevar.h"
#include "alloc-pool.h"
#include "splay-tree.h"
44
#include "params.h"
45
#include "cgraph.h"
46
#include "alias.h"
Diego Novillo committed
47
#include "pointer-set.h"
48 49 50

/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
51
   points-to sets.
52 53 54 55 56 57 58 59 60 61 62 63 64 65 66

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See  "Efficient Field-sensitive pointer analysis for C" by "David
   J. Pearce and Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
67 68 69
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
70
   ADDRESSOF, and SCALAR.  Each constraint expression consists
71
   of a constraint type, a variable, and an offset.
72 73 74 75

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
76
   it appears on the LHS or the RHS of a statement.
77
   ADDRESSOF is a constraint expression used to represent &x, whether
78
   it appears on the LHS or the RHS of a statement.
79

80 81
   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.
82

83 84
   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
85 86
   order.
   Each variable for a structure field has
87 88 89 90 91 92

   1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
   3. "offset", that tells the offset in bits from the beginning of the
   structure to this field.

93
   Thus,
94 95 96 97 98 99 100 101 102 103 104 105 106
   struct f
   {
     int a;
     int b;
   } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL

107

108 109 110 111 112
  In order to solve the system of set constraints, the following is
  done:

  1. Each constraint variable x has a solution set associated with it,
  Sol(x).
113

114 115 116 117
  2. Constraints are separated into direct, copy, and complex.
  Direct constraints are ADDRESSOF constraints that require no extra
  processing, such as P = &Q
  Copy constraints are those of the form P = Q.
118 119
  Complex constraints are all the constraints involving dereferences
  and offsets (including offsetted copies).
120

121
  3. All direct constraints of the form P = &Q are processed, such
122
  that Q is added to Sol(P)
123 124

  4. All complex constraints for a given constraint variable are stored in a
125
  linked list attached to that variable's node.
126 127

  5. A directed graph is built out of the copy constraints. Each
128
  constraint variable is a node in the graph, and an edge from
129
  Q to P is added for each copy constraint of the form P = Q
130

131 132 133
  6. The graph is then walked, and solution sets are
  propagated along the copy edges, such that an edge from Q to P
  causes Sol(P) <- Sol(P) union Sol(Q).
134

135
  7.  As we visit each node, all complex constraints associated with
136
  that node are processed by adding appropriate copy edges to the graph, or the
137
  appropriate variables to the solution set.
138 139 140 141 142

  8. The process of walking the graph is iterated until no solution
  sets change.

  Prior to walking the graph in steps 6 and 7, We perform static
143
  cycle elimination on the constraint graph, as well
144
  as off-line variable substitution.
145

146 147 148
  TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
  on and turned into anything), but isn't.  You can just see what offset
  inside the pointed-to struct it's going to access.
149

150
  TODO: Constant bounded arrays can be handled as if they were structs of the
151
  same number of elements.
152 153 154 155 156 157 158

  TODO: Modeling heap and incoming pointers becomes much better if we
  add fields to them as we discover them, which we could do.

  TODO: We could handle unions, but to be honest, it's probably not
  worth the pain or slowdown.  */

159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199
/* IPA-PTA optimizations possible.

   When the indirect function called is ANYTHING we can add disambiguation
   based on the function signatures (or simply the parameter count which
   is the varinfo size).  We also do not need to consider functions that
   do not have their address taken.

   The is_global_var bit which marks escape points is overly conservative
   in IPA mode.  Split it to is_escape_point and is_global_var - only
   externally visible globals are escape points in IPA mode.  This is
   also needed to fix the pt_solution_includes_global predicate
   (and thus ptr_deref_may_alias_global_p).

   The way we introduce DECL_PT_UID to avoid fixing up all points-to
   sets in the translation unit when we copy a DECL during inlining
   pessimizes precision.  The advantage is that the DECL_PT_UID keeps
   compile-time and memory usage overhead low - the points-to sets
   do not grow or get unshared as they would during a fixup phase.
   An alternative solution is to delay IPA PTA until after all
   inlining transformations have been applied.

   The way we propagate clobber/use information isn't optimized.
   It should use a new complex constraint that properly filters
   out local variables of the callee (though that would make
   the sets invalid after inlining).  OTOH we might as well
   admit defeat to WHOPR and simply do all the clobber/use analysis
   and propagation after PTA finished but before we threw away
   points-to information for memory variables.  WHOPR and PTA
   do not play along well anyway - the whole constraint solving
   would need to be done in WPA phase and it will be very interesting
   to apply the results to local SSA names during LTRANS phase.

   We probably should compute a per-function unit-ESCAPE solution
   propagating it simply like the clobber / uses solutions.  The
   solution can go alongside the non-IPA escaped solution and be
   used to query which vars escape the unit through a function.

   We never put function decls in points-to sets so we do not
   keep the set of called functions for indirect calls.

   And probably more.  */
200
static GTY ((if_marked ("tree_map_marked_p"), param_is (struct heapvar_map)))
201 202
htab_t heapvar_for_stmt;

203
static bool use_field_sensitive = true;
204
static int in_ipa_mode = 0;
205 206

/* Used for predecessor bitmaps. */
207
static bitmap_obstack predbitmap_obstack;
208 209 210 211 212 213 214 215

/* Used for points-to sets.  */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps.  */
216 217
static bitmap_obstack iteration_obstack;

218
static unsigned int create_variable_info_for (tree, const char *);
219 220
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
221

222 223 224
struct constraint;
typedef struct constraint *constraint_t;

225
DEF_VEC_P(constraint_t);
226
DEF_VEC_ALLOC_P(constraint_t,heap);
227

228 229 230 231
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

232 233 234
/* Statistics gathered while building and solving the constraint
   system; dumped alongside the points-to results.  */
static struct constraint_stats
{
  /* Number of constraint variables created (incremented in
     new_var_info).  */
  unsigned int total_vars;
  /* Variables determined not to contain pointers.  */
  unsigned int nonpointer_vars;
  /* Variables unified before solving (offline) and during solving
     (online), respectively.  */
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  /* Iterations of the solver worklist loop.  */
  unsigned int iterations;
  /* Explicit and implicit edges in the constraint graph.  */
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  /* Number of points-to sets created.  */
  unsigned int points_to_sets_created;
} stats;

/* Per-constraint-variable information.  One of these exists for every
   constraint variable, i.e. for every scalar and for every field of a
   structure that the analysis tracks separately.  */
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this is a variable tracking a restrict pointer source.  */
  unsigned int is_restrict_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents an IPA function info.  */
  unsigned int is_fn_info : 1;

  /* A link to the variable for the next field in this structure.  */
  struct variable_info *next;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
308 309
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
310
static varinfo_t lookup_vi_for_tree (tree);
311 312 313 314 315 316

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

DEF_VEC_P(varinfo_t);

317
DEF_VEC_ALLOC_P(varinfo_t, heap);
318

Diego Novillo committed
319 320
/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
321
static VEC(varinfo_t,heap) *varmap;
322 323 324 325

/* Return the varmap element N, i.e. the variable info structure for
   the constraint variable with id N.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return VEC_index (varinfo_t, varmap, n);
}
330

331 332
/* Static IDs for the special variables.  */
enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
333 334
       escaped_id = 3, nonlocal_id = 4,
       storedanything_id = 5, integer_id = 6 };
335

336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356
/* Entry in the GC-managed heapvar_for_stmt hash table.  Keys a tree
   (map.base.from) plus an offset to the heap variable created for it
   (map.to).  */
struct GTY(()) heapvar_map {
  /* Embedded tree_map: base.from is the key tree, to is the heap var.  */
  struct tree_map map;
  /* Offset component of the hash key.  */
  unsigned HOST_WIDE_INT offset;
};

/* Equality function for heapvar_map hash table entries: two entries
   are equal when both the source tree and the offset match.  */

static int
heapvar_map_eq (const void *p1, const void *p2)
{
  const struct heapvar_map *a = (const struct heapvar_map *) p1;
  const struct heapvar_map *b = (const struct heapvar_map *) p2;

  if (a->map.base.from != b->map.base.from)
    return 0;
  return a->offset == b->offset;
}

/* Hash function for heapvar_map entries, combining the pointer hash of
   the source tree with the offset.  */

static unsigned int
heapvar_map_hash (struct heapvar_map *h)
{
  unsigned int from_hash = htab_hash_pointer (h->map.base.from);

  return iterative_hash_host_wide_int (h->offset, from_hash);
}

357
/* Lookup a heap var for FROM at OFFSET, and return it if we find one;
   return NULL_TREE otherwise.  */

static tree
heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset)
{
  struct heapvar_map *h, in;
  /* Build a stack-allocated key to probe the hash table with.  */
  in.map.base.from = from;
  in.offset = offset;
  h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in,
						  heapvar_map_hash (&in));
  if (h)
    return h->map.to;
  return NULL_TREE;
}

/* Insert a mapping (FROM, OFFSET)->TO in the heap var for statement
   hashtable.  Asserts that no entry for this key exists yet.  */

static void
heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to)
{
  struct heapvar_map *h;
  void **loc;

  /* The entry is GC-allocated because the table is GTY-marked.  */
  h = ggc_alloc_heapvar_map ();
  h->map.base.from = from;
  h->offset = offset;
  h->map.hash = heapvar_map_hash (h);
  h->map.to = to;
  loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT);
  /* There must not be an existing mapping for this key.  */
  gcc_assert (*loc == NULL);
  *(struct heapvar_map **) loc = h;
}
390

391
/* Return a new variable info structure for a variable named NAME and
   associated with tree T (which may be NULL_TREE for artificial
   variables).  Append it to the vector of variable info structures.  */

static varinfo_t
new_var_info (tree t, const char *name)
{
  /* The new variable's id is its index in varmap.  */
  unsigned index = VEC_length (varinfo_t, varmap);
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables.  */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->is_restrict_var = false;
  /* Conservatively assume pointers may be present.  */
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  /* Artificial vars are treated as global; for real decls ask the
     front end below.  */
  ret->is_global_var = (t == NULL_TREE);
  ret->is_fn_info = false;
  if (t && DECL_P (t))
    ret->is_global_var = is_global_var (t);
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
  ret->next = NULL;

  stats.total_vars++;

  VEC_safe_push (varinfo_t, heap, varmap, ret);

  return ret;
}

428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507

/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  */
struct pointer_map_t *call_stmt_vars;

/* Lookup or create the pair of variables for the call statement CALL:
   a "CALLUSED" variable followed (via ->next) by a "CALLCLOBBERED"
   variable.  Returns the uses variable.  */

static varinfo_t
get_call_vi (gimple call)
{
  void **slot;
  varinfo_t uses, clobbers;

  slot = pointer_map_insert (call_stmt_vars, call);
  if (*slot)
    return (varinfo_t) *slot;

  /* Model the pair as a two-unit artificial variable: the uses part
     at offset 0 and the clobbers part at offset 1.  */
  uses = new_var_info (NULL_TREE, "CALLUSED");
  uses->offset = 0;
  uses->size = 1;
  uses->fullsize = 2;
  uses->is_full_var = true;

  clobbers = new_var_info (NULL_TREE, "CALLCLOBBERED");
  clobbers->offset = 1;
  clobbers->size = 1;
  clobbers->fullsize = 2;
  clobbers->is_full_var = true;
  uses->next = clobbers;

  *slot = (void *) uses;
  return uses;
}

/* Lookup the variable for the call statement CALL representing
   the uses.  Returns NULL if there is nothing special about this call.  */

static varinfo_t
lookup_call_use_vi (gimple call)
{
  void **slot = pointer_map_contains (call_stmt_vars, call);

  return slot ? (varinfo_t) *slot : NULL;
}

/* Lookup the variable for the call statement CALL representing
   the clobbers.  Returns NULL if there is nothing special about this
   call.  The clobbers variable is chained after the uses variable.  */

static varinfo_t
lookup_call_clobber_vi (gimple call)
{
  varinfo_t uses = lookup_call_use_vi (call);

  return uses ? uses->next : NULL;
}

/* Lookup or create the variable for the call statement CALL representing
   the uses.  (The uses variable is the head of the pair created by
   get_call_vi.)  */

static varinfo_t
get_call_use_vi (gimple call)
{
  return get_call_vi (call);
}

/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  (The clobbers variable is chained after the uses
   variable created by get_call_vi.)  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gimple call)
{
  return get_call_vi (call)->next;
}


508
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
509 510 511

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type: SCALAR (x), DEREF (*x) or ADDRESSOF (&x).  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.   */
  HOST_WIDE_INT offset;
};

528 529 530
/* Use 0x8000... as special unknown offset.  Computed with an unsigned
   shift and converted back: left-shifting a negative value is undefined
   behavior in C, while this form yields the same minimum-value bit
   pattern portably.  */
#define UNKNOWN_OFFSET \
  ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))

531 532 533
typedef struct constraint_expr ce_s;
DEF_VEC_O(ce_s);
DEF_VEC_ALLOC_O(ce_s, heap);
534
static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool, bool);
535
static void get_constraint_for (tree, VEC(ce_s, heap) **);
536
static void get_constraint_for_rhs (tree, VEC(ce_s, heap) **);
537
static void do_deref (VEC (ce_s, heap) **);
538 539

/* Our set constraints are made up of two constraint expressions, one
540
   LHS, and one RHS.
541 542 543 544 545 546 547 548 549 550 551 552

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  /* Left-hand side of the inclusion constraint LHS >= RHS.  */
  struct constraint_expr lhs;
  /* Right-hand side.  */
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

553
static VEC(constraint_t,heap) *constraints;
554 555
static alloc_pool constraint_pool;

556 557
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */
558 559 560

/* The constraint graph.  Nodes are constraint variables; edges carry
   copy constraints, along with the bookkeeping arrays used by offline
   variable substitution and cycle elimination.  */
struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified. */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  VEC(constraint_t,heap) **complex;
};
629 630 631

static constraint_graph_t graph;

632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))

/* Return the representative node for NODE, if NODE has been unioned
   with another node.  Performs full path compression: every node on
   the walk to the root is re-pointed directly at the root, exactly as
   the recursive formulation would do.  */

static unsigned int
find (unsigned int node)
{
  unsigned int root = node;

  gcc_assert (node < graph->size);

  /* First pass: locate the representative.  */
  while (graph->rep[root] != root)
    root = graph->rep[root];

  /* Second pass: compress the path so later lookups are O(1).  */
  while (graph->rep[node] != root)
    {
      unsigned int parent = graph->rep[node];
      graph->rep[node] = root;
      node = parent;
    }

  return root;
}

/* Union the TO and FROM nodes to the TO node.  Returns true if a
   union was actually performed, false if FROM was already represented
   by TO (or they are the same node).
   Note that at some point in the future, we may want to do
   union-by-rank, in which case we are going to have to return the
   node we unified to.  */

static bool
unite (unsigned int to, unsigned int from)
{
  gcc_assert (to < graph->size && from < graph->size);

  if (to == from || graph->rep[from] == to)
    return false;

  graph->rep[from] = to;
  return true;
}

670 671
/* Create a new constraint consisting of LHS and RHS expressions.
   The constraint is allocated from constraint_pool.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}

/* Print out constraint C to FILE, in the form "lhs = rhs" with
   "&"/"*" prefixes for ADDRESSOF/DEREF and " + offset" suffixes for
   non-zero offsets.  */

static void
dump_constraint (FILE *file, constraint_t c)
{
  if (c->lhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->lhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
  if (c->lhs.offset == UNKNOWN_OFFSET)
    fprintf (file, " + UNKNOWN");
  else if (c->lhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
  fprintf (file, " = ");
  if (c->rhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->rhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
  if (c->rhs.offset == UNKNOWN_OFFSET)
    fprintf (file, " + UNKNOWN");
  else if (c->rhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
  fprintf (file, "\n");
}

709 710 711 712 713 714 715

void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);

716 717
/* Print out constraint C to stderr.  */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
}

/* Print out all constraints to FILE, starting at index FROM in the
   global constraints vector.  */

static void
dump_constraints (FILE *file, int from)
{
  int i;
  constraint_t c;
  for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
    dump_constraint (file, c);
}

/* Print out all constraints to stderr.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}

743 744 745 746 747 748 749
/* Print out to FILE the edge in the constraint graph that is created by
   constraint c. The edge may have a label, depending on the type of
   constraint that it represents. If complex1, e.g: a = *b, then the label
   is "=*", if complex2, e.g: *a = b, then the label is "*=", if
   complex with an offset, e.g: a = b + 8, then the label is "+".
   Otherwise the edge has no label.  ADDRESSOF constraints produce no
   edge at all.  */

static void
dump_constraint_edge (FILE *file, constraint_t c)
{
  if (c->rhs.type != ADDRESSOF)
    {
      const char *src = get_varinfo (c->rhs.var)->name;
      const char *dst = get_varinfo (c->lhs.var)->name;
      fprintf (file, "  \"%s\" -> \"%s\" ", src, dst);
      /* Due to preprocessing of constraints, instructions like *a = *b are
         illegal; thus, we do not have to handle such cases.  */
      if (c->lhs.type == DEREF)
        fprintf (file, " [ label=\"*=\" ] ;\n");
      else if (c->rhs.type == DEREF)
        fprintf (file, " [ label=\"=*\" ] ;\n");
      else
        {
          /* We must check the case where the constraint is an offset.
             In this case, it is treated as a complex constraint.  */
          if (c->rhs.offset != c->lhs.offset)
            fprintf (file, " [ label=\"+\" ] ;\n");
          else
            fprintf (file, " ;\n");
        }
    }
}

/* Print the constraint graph in dot format to FILE.  The constraints
   themselves are emitted first as a comment block, then the nodes and
   edges of the graph.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i=0, size;
  constraint_t c;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Print the constraints used to produce the constraint graph. The
     constraints will be printed as comments in the dot file:  */
  fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
  dump_constraints (file, 0);
  fprintf (file, "*/\n");

  /* Prints the header of the dot file:  */
  fprintf (file, "\n\n// The constraint graph in dot format:\n");
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes in the constraint graph:\n");

  /* The next lines print the nodes in the graph. In order to get the
     number of nodes in the graph, we must choose the minimum between the
     vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
     yet been initialized, then graph->size == 0, otherwise we must only
     read nodes that have an entry in VEC (varinfo_t, varmap).  */
  size = VEC_length (varinfo_t, varmap);
  size = size < graph->size ? size : graph->size;
  for (i = 0; i < size; i++)
    {
      /* Print the representative of each node, since unified nodes
         share a representative.  */
      const char *name = get_varinfo (graph->rep[i])->name;
      fprintf (file, "  \"%s\" ;\n", name);
    }

  /* Go over the list of constraints printing the edges in the constraint
     graph.  */
  fprintf (file, "\n  // The constraint edges:\n");
  FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
    if (c)
      dump_constraint_edge (file, c);

  /* Prints the tail of the dot file. By now, only the closing bracket.  */
  fprintf (file, "}\n\n\n");
}

/* Print out the constraint graph to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}

833
/* SOLVER FUNCTIONS
834 835 836

   The solver is a simple worklist solver, that works on the following
   algorithm:
837

838 839 840 841 842
   sbitmap changed_nodes = all zeroes;
   changed_count = 0;
   For each node that is not already collapsed:
       changed_count++;
       set bit in changed nodes
843 844 845 846

   while (changed_count > 0)
   {
     compute topological ordering for constraint graph
847

848 849
     find and collapse cycles in the constraint graph (updating
     changed if necessary)
850

851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866
     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.

   }  */

/* Return true if two constraint expressions A and B are equal.  */

static bool
constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
{
867
  return a.type == b.type && a.var == b.var && a.offset == b.offset;
868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902
}

/* Return true if constraint expression A is less than constraint
   expression B.  The order is lexicographic on (type, var, offset) —
   arbitrary, but consistent, so expressions can be sorted.  */

static bool
constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
{
  if (a.type != b.type)
    return a.type < b.type;
  if (a.var != b.var)
    return a.var < b.var;
  return a.offset < b.offset;
}

/* Return true if constraint A is less than constraint B.  Compares
   the LHS expressions first and falls back to the RHS on a tie —
   arbitrary, but consistent, so constraints can be sorted.  */

static bool
constraint_less (const constraint_t a, const constraint_t b)
{
  bool lhs_lt = constraint_expr_less (a->lhs, b->lhs);
  bool lhs_gt = constraint_expr_less (b->lhs, a->lhs);

  if (lhs_lt)
    return true;
  if (lhs_gt)
    return false;
  /* LHS expressions tie; order by the RHS.  */
  return constraint_expr_less (a->rhs, b->rhs);
}

/* Return true if two constraints A and B are equal.  */
903

904 905 906
static bool
constraint_equal (struct constraint a, struct constraint b)
{
907
  return constraint_expr_equal (a.lhs, b.lhs)
908 909 910 911 912 913 914
    && constraint_expr_equal (a.rhs, b.rhs);
}


/* Find a constraint LOOKFOR in the sorted constraint vector VEC.
   Returns the matching constraint, or NULL if it is not present.
   VEC must be sorted by constraint_less.  */

static constraint_t
constraint_vec_find (VEC(constraint_t,heap) *vec,
		     struct constraint lookfor)
{
  unsigned int place;
  constraint_t found;

  if (vec == NULL)
    return NULL;

  /* Binary-search for the insertion point, then check whether the
     element there really matches.  */
  place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
  if (place >= VEC_length (constraint_t, vec))
    return NULL;
  found = VEC_index (constraint_t, vec, place);
  if (!constraint_equal (*found, lookfor))
    return NULL;
  return found;
}

/* Union two constraint vectors, TO and FROM.  Put the result in TO.  */

static void
936 937
constraint_set_union (VEC(constraint_t,heap) **to,
		      VEC(constraint_t,heap) **from)
938 939 940 941
{
  int i;
  constraint_t c;

942
  FOR_EACH_VEC_ELT (constraint_t, *from, i, c)
943 944 945 946 947
    {
      if (constraint_vec_find (*to, *c) == NULL)
	{
	  unsigned int place = VEC_lower_bound (constraint_t, *to, c,
						constraint_less);
948
	  VEC_safe_insert (constraint_t, heap, *to, place, c);
949 950 951 952
	}
    }
}

953 954 955 956 957 958 959 960 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990
/* Expands the solution in SET to all sub-fields of variables included.
   Union the expanded result into RESULT.  */

static void
solution_set_expand (bitmap result, bitmap set)
{
  bitmap_iterator bi;
  bitmap vars = NULL;
  unsigned j;

  /* In a first pass record all variables we need to add all
     sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      v = lookup_vi_for_tree (v->decl);
      if (vars == NULL)
	vars = BITMAP_ALLOC (NULL);
      bitmap_set_bit (vars, v->id);
    }

  /* In the second pass now do the addition to the solution and
     to speed up solving add it to the delta as well.  */
  if (vars != NULL)
    {
      EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
	{
	  varinfo_t v = get_varinfo (j);
	  for (; v != NULL; v = v->next)
	    bitmap_set_bit (result, v->id);
	}
      BITMAP_FREE (vars);
    }
}

991 992 993 994
/* Take a solution set SET, add OFFSET to each member of the set, and
   overwrite SET with the result when done.  */

static void
995
solution_set_add (bitmap set, HOST_WIDE_INT offset)
996 997 998 999 1000
{
  bitmap result = BITMAP_ALLOC (&iteration_obstack);
  unsigned int i;
  bitmap_iterator bi;

1001 1002 1003 1004 1005 1006 1007 1008
  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (offset == UNKNOWN_OFFSET)
    {
      solution_set_expand (set, set);
      return;
    }

1009 1010
  EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
    {
1011
      varinfo_t vi = get_varinfo (i);
1012

1013 1014 1015 1016 1017 1018 1019
      /* If this is a variable with just one field just set its bit
         in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	bitmap_set_bit (result, i);
      else
1020
	{
1021
	  unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
1022 1023 1024 1025 1026 1027 1028 1029 1030 1031 1032

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (offset < 0
	      && fieldoffset > vi->offset)
	    fieldoffset = 0;

	  if (offset != 0)
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  bitmap_set_bit (result, vi->id);
1033 1034 1035
	  /* If the result is not exactly at fieldoffset include the next
	     field as well.  See get_constraint_for_ptr_offset for more
	     rationale.  */
1036 1037 1038
	  if (vi->offset != fieldoffset
	      && vi->next != NULL)
	    bitmap_set_bit (result, vi->next->id);
1039 1040
	}
    }
1041 1042

  bitmap_copy (set, result);
1043 1044 1045 1046 1047 1048 1049
  BITMAP_FREE (result);
}

/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  */

static bool
1050
set_union_with_increment  (bitmap to, bitmap from, HOST_WIDE_INT inc)
1051 1052 1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067
{
  if (inc == 0)
    return bitmap_ior_into (to, from);
  else
    {
      bitmap tmp;
      bool res;

      tmp = BITMAP_ALLOC (&iteration_obstack);
      bitmap_copy (tmp, from);
      solution_set_add (tmp, inc);
      res = bitmap_ior_into (to, tmp);
      BITMAP_FREE (tmp);
      return res;
    }
}

1068 1069
/* Insert constraint C into the list of complex constraints for graph
   node VAR.  */
1070 1071

static void
1072 1073
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
1074
{
1075 1076
  VEC (constraint_t, heap) *complex = graph->complex[var];
  unsigned int place = VEC_lower_bound (constraint_t, complex, c,
1077
					constraint_less);
1078 1079 1080 1081 1082

  /* Only insert constraints that do not already exist.  */
  if (place >= VEC_length (constraint_t, complex)
      || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
    VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
1083 1084 1085 1086 1087 1088
}


/* Condense two variable nodes into a single variable node, by moving
   all associated info from SRC to TO.  */

1089
static void
1090 1091
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
1092 1093 1094
{
  unsigned int i;
  constraint_t c;
1095

1096
  gcc_assert (find (from) == to);
1097

1098
  /* Move all complex constraints from src node into to node  */
1099
  FOR_EACH_VEC_ELT (constraint_t, graph->complex[from], i, c)
1100 1101
    {
      /* In complex constraints for node src, we may have either
1102 1103
	 a = *src, and *src = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */
1104

1105 1106
      if (c->rhs.type == DEREF)
	c->rhs.var = to;
1107
      else if (c->lhs.type == DEREF)
1108
	c->lhs.var = to;
1109 1110
      else
	c->rhs.var = to;
1111
    }
1112 1113 1114
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  VEC_free (constraint_t, heap, graph->complex[from]);
  graph->complex[from] = NULL;
1115 1116 1117 1118 1119 1120 1121 1122
}


/* Remove edges involving NODE from GRAPH by releasing its successor
   bitmap.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  if (graph->succs[node] != NULL)
    BITMAP_FREE (graph->succs[node]);
}

1127 1128 1129
/* Merge GRAPH nodes FROM and TO into node TO.  */

static void
1130
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1131 1132
		   unsigned int from)
{
1133
  if (graph->indirect_cycles[from] != -1)
1134
    {
1135 1136 1137 1138 1139 1140 1141
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
1142
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
1143
    }
1144

1145 1146
  /* Merge all the successor edges.  */
  if (graph->succs[from])
1147
    {
1148
      if (!graph->succs[to])
1149
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1150
      bitmap_ior_into (graph->succs[to],
1151
		       graph->succs[from]);
1152 1153
    }

1154 1155 1156
  clear_edges_for_node (graph, from);
}

1157 1158 1159 1160 1161 1162 1163 1164 1165 1166 1167 1168 1169 1170

/* Add an indirect graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */

static void
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
			 unsigned int from)
{
  if (to == from)
    return;

  if (!graph->implicit_preds[to])
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);

1171 1172
  if (bitmap_set_bit (graph->implicit_preds[to], from))
    stats.num_implicit_edges++;
1173 1174 1175 1176 1177 1178 1179 1180 1181 1182 1183 1184
}

/* Add a predecessor graph edge to GRAPH, going from TO to FROM, if
   it doesn't exist in the graph already.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (graph->preds[to] == NULL)
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}

/* Add a graph edge to GRAPH, going from FROM to TO if
1189 1190 1191 1192
   it doesn't exist in the graph already.
   Return false if the edge already existed, true otherwise.  */

static bool
1193 1194
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
1195
{
1196
  if (to == from)
1197 1198 1199 1200 1201
    {
      return false;
    }
  else
    {
1202
      bool r = false;
1203

1204
      if (!graph->succs[from])
1205
	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1206
      if (bitmap_set_bit (graph->succs[from], to))
1207
	{
1208
	  r = true;
1209 1210
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
1211
	}
1212 1213 1214 1215 1216
      return r;
    }
}


1217
/* Return true if {DEST.SRC} is an existing graph edge in GRAPH.  */
1218 1219

static bool
1220
valid_graph_edge (constraint_graph_t graph, unsigned int src,
1221
		  unsigned int dest)
1222
{
1223
  return (graph->succs[dest]
1224
	  && bitmap_bit_p (graph->succs[dest], src));
1225 1226
}

1227 1228 1229 1230 1231 1232 1233 1234 1235 1236 1237 1238 1239
/* Initialize the constraint graph structure to contain SIZE nodes.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1240
  graph->pe = XCNEWVEC (unsigned int, graph->size);
1241 1242 1243 1244 1245 1246 1247 1248 1249 1250
  graph->pe_rep = XNEWVEC (int, graph->size);

  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}

1251
/* Build the constraint graph, adding only predecessor edges right now.  */
1252 1253

static void
1254
build_pred_graph (void)
1255
{
1256
  int i;
1257
  constraint_t c;
1258
  unsigned int j;
1259

1260 1261
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
1262 1263 1264 1265
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
1266 1267
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
1268
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1269 1270 1271 1272 1273 1274 1275 1276 1277
  sbitmap_zero (graph->direct_nodes);

  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	SET_BIT (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
1278
    graph->eq_rep[j] = -1;
1279 1280 1281

  for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
    graph->indirect_cycles[j] = -1;
1282

1283
  FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
1284 1285 1286
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
1287 1288
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;
1289

1290 1291
      if (lhs.type == DEREF)
	{
1292 1293 1294
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1295 1296 1297
	}
      else if (rhs.type == DEREF)
	{
1298 1299 1300 1301 1302
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    RESET_BIT (graph->direct_nodes, lhsvar);
1303
	}
1304
      else if (rhs.type == ADDRESSOF)
1305
	{
1306 1307
	  varinfo_t v;

1308
	  /* x = &y */
1309 1310 1311 1312 1313 1314 1315 1316
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

1317 1318 1319
	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

1320
	  /* All related variables are no longer direct nodes.  */
1321
	  RESET_BIT (graph->direct_nodes, rhsvar);
1322 1323 1324 1325 1326 1327 1328 1329 1330 1331 1332
          v = get_varinfo (rhsvar);
          if (!v->is_full_var)
            {
              v = lookup_vi_for_tree (v->decl);
              do
                {
                  RESET_BIT (graph->direct_nodes, v->id);
                  v = v->next;
                }
              while (v != NULL);
            }
1333
	  bitmap_set_bit (graph->address_taken, rhsvar);
1334
	}
1335 1336
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1337
	{
1338 1339 1340 1341 1342 1343 1344 1345 1346 1347
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  if (rhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, lhs.var);
1348
	  else if (lhs.offset != 0)
1349 1350 1351 1352 1353 1354 1355 1356 1357 1358
	    RESET_BIT (graph->direct_nodes, rhs.var);
	}
    }
}

/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
1359
  unsigned i, t;
1360 1361
  constraint_t c;

1362
  FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
1363 1364 1365 1366 1367 1368 1369 1370
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;
1371

1372 1373
      lhs = c->lhs;
      rhs = c->rhs;
1374 1375
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);
1376 1377 1378 1379 1380 1381 1382 1383 1384 1385 1386 1387 1388 1389

      if (lhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
1390
	  gcc_assert (find (rhs.var) == rhs.var);
1391 1392 1393 1394 1395 1396
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  add_graph_edge (graph, lhsvar, rhsvar);
1397 1398
	}
    }
1399

1400 1401
  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
1402 1403 1404
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
1405 1406
      if (!TEST_BIT (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
1407 1408
	add_graph_edge (graph, find (i), t);
    }
1409 1410 1411

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
1412
}
1413 1414


1415 1416 1417 1418 1419 1420 1421 1422 1423
/* Number of nodes currently marked changed; kept in sync with the
   population of the CHANGED sbitmap below.  */
static unsigned int changed_count;
/* Per-node bit, set while a node's solution still needs propagation.  */
static sbitmap changed;

/* Strongly Connected Component visitation info.  */

struct scc_info
{
  sbitmap visited;
1424
  sbitmap deleted;
1425 1426
  unsigned int *dfs;
  unsigned int *node_mapping;
1427
  int current_index;
1428
  VEC(unsigned,heap) *scc_stack;
1429 1430 1431 1432 1433 1434
};


/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (TEST_BIT (si->deleted, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	scc_visit (graph, si, w);
      {
	unsigned int t = find (w);
	unsigned int nnode = find (n);
	gcc_assert (nnode == n);

	/* Propagate the minimum DFS number seen ("lowlink" update).  */
	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  N is an SCC root iff
     its DFS number was not lowered by any successor.  */
  if (si->dfs[n] == my_dfs)
    {
      if (VEC_length (unsigned, si->scc_stack) > 0
	  && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop all stack entries belonging to this SCC.  */
	  while (VEC_length (unsigned, si->scc_stack) != 0
		 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	    {
	      unsigned int w = VEC_pop (unsigned, si->scc_stack);

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      SET_BIT (si->deleted, n);
    }
  else
    /* Not a root: remember N for the root that will collect it.  */
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}

/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be the
   union-find representative of FROM.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{

  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move FROM's edges and complex constraints over to TO.  */
  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed. If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed && TEST_BIT (changed, from))
    {
      RESET_BIT (changed, from);
      if (!TEST_BIT (changed, to))
	SET_BIT (changed, to);
      else
	{
	  /* Both were marked: one mark disappears.  */
	  gcc_assert (changed_count > 0);
	  changed_count--;
	}
    }
  if (get_varinfo (from)->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      if (bitmap_ior_into (get_varinfo (to)->solution,
			   get_varinfo (from)->solution))
	{
	  if (update_changed && !TEST_BIT (changed, to))
	    {
	      SET_BIT (changed, to);
	      changed_count++;
	    }
	}

      /* FROM's solution sets are dead after the merge.  */
      BITMAP_FREE (get_varinfo (from)->solution);
      BITMAP_FREE (get_varinfo (from)->oldsolution);

      if (stats.iterations > 0)
	{
	  BITMAP_FREE (get_varinfo (to)->oldsolution);
	  get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
	}
    }
  /* Remove any self edge the merge may have created on TO.  */
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}

/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  VEC(unsigned,heap) *topo_order;
};


/* Initialize and return a topological info structure.  */

static struct topo_info *
init_topo_info (void)
{
1606
  size_t size = graph->size;
1607
  struct topo_info *ti = XNEW (struct topo_info);
1608 1609
  ti->visited = sbitmap_alloc (size);
  sbitmap_zero (ti->visited);
1610
  ti->topo_order = VEC_alloc (unsigned, heap, 1);
1611 1612 1613 1614 1615 1616 1617 1618 1619 1620
  return ti;
}


/* Free the topological sort info pointed to by TI.  */

static void
free_topo_info (struct topo_info *ti)
{
  sbitmap_free (ti->visited);
1621
  VEC_free (unsigned, heap, ti->topo_order);
1622 1623 1624 1625 1626 1627 1628 1629 1630 1631
  free (ti);
}

/* Visit the graph in topological order starting from node N, and store
   the order in the topo_info structure TI (post-order, i.e. reversed
   topological order).  */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  SET_BIT (ti->visited, n);

  /* Recurse into every unvisited successor before emitting N.  */
  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	if (!TEST_BIT (ti->visited, j))
	  topo_visit (graph, ti, j);
      }

  VEC_safe_push (unsigned, heap, ti->topo_order, n);
}

/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned int t;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (roffset != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == NULL)
	    break;

	  v = v->next;
	  fieldoffset = v->offset;
	}
      while (1);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      if (!TEST_BIT (changed, lhs))
	{
	  SET_BIT (changed, lhs);
	  changed_count++;
	}
    }
}

/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  /* Set once RHS's solution has been merged into ESCAPED.  */
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    {
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (loff != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol)
		      && !TEST_BIT (changed, t))
		    {
		      SET_BIT (changed, t);
		      changed_count++;
		    }
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol)
		  && !TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == NULL)
	    break;

	  v = v->next;
	  fieldoffset = v->offset;
	}
      while (1);
    }
}

1851 1852
/* Handle a non-simple (simple meaning requires no iteration),
   constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).  */
1853

1854 1855 1856 1857 1858 1859 1860
static void
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
{
  if (c->lhs.type == DEREF)
    {
      if (c->rhs.type == ADDRESSOF)
	{
1861
	  gcc_unreachable();
1862 1863 1864 1865
	}
      else
	{
	  /* *x = y */
1866
	  do_ds_constraint (c, delta);
1867 1868
	}
    }
1869
  else if (c->rhs.type == DEREF)
1870 1871
    {
      /* x = *y */
1872 1873
      if (!(get_varinfo (c->lhs.var)->is_special_var))
	do_sd_constraint (graph, c, delta);
1874
    }
1875
  else
1876
    {
1877
      bitmap tmp;
1878 1879 1880
      bitmap solution;
      bool flag = false;

1881
      gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1882 1883
      solution = get_varinfo (c->rhs.var)->solution;
      tmp = get_varinfo (c->lhs.var)->solution;
1884 1885

      flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1886

1887 1888
      if (flag)
	{
1889 1890
	  get_varinfo (c->lhs.var)->solution = tmp;
	  if (!TEST_BIT (changed, c->lhs.var))
1891
	    {
1892
	      SET_BIT (changed, c->lhs.var);
1893 1894 1895 1896
	      changed_count++;
	    }
	}
    }
1897 1898 1899 1900 1901
}

/* Initialize and return a new SCC info structure.  */

static struct scc_info *
1902
init_scc_info (size_t size)
1903
{
1904
  struct scc_info *si = XNEW (struct scc_info);
1905
  size_t i;
1906 1907 1908 1909

  si->current_index = 0;
  si->visited = sbitmap_alloc (size);
  sbitmap_zero (si->visited);
1910 1911
  si->deleted = sbitmap_alloc (size);
  sbitmap_zero (si->deleted);
1912 1913 1914 1915 1916 1917
  si->node_mapping = XNEWVEC (unsigned int, size);
  si->dfs = XCNEWVEC (unsigned int, size);

  for (i = 0; i < size; i++)
    si->node_mapping[i] = i;

1918
  si->scc_stack = VEC_alloc (unsigned, heap, 1);
1919 1920 1921 1922 1923 1924 1925
  return si;
}

/* Free an SCC info structure pointed to by SI */

static void
free_scc_info (struct scc_info *si)
1926
{
1927
  sbitmap_free (si->visited);
1928
  sbitmap_free (si->deleted);
1929 1930
  free (si->node_mapping);
  free (si->dfs);
1931
  VEC_free (unsigned, heap, si->scc_stack);
1932
  free (si);
1933 1934 1935
}


1936 1937 1938 1939 1940 1941
/* Find indirect cycles in GRAPH that occur, using strongly connected
   components, and note them in the indirect cycles map.

   This technique comes from Ben Hardekopf and Calvin Lin,
   "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
   Lines of Code", submitted to PLDI 2007.  */
1942 1943

static void
1944
find_indirect_cycles (constraint_graph_t graph)
1945 1946
{
  unsigned int i;
1947 1948
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);
1949

1950 1951
  for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
    if (!TEST_BIT (si->visited, i) && find (i) == i)
1952
      scc_visit (graph, si, i);
1953

1954 1955 1956 1957 1958 1959
  free_scc_info (si);
}

/* Compute a topological ordering for GRAPH, and store the result in the
   topo_info structure TI.  */

1960
static void
1961 1962 1963 1964
compute_topo_order (constraint_graph_t graph,
		    struct topo_info *ti)
{
  unsigned int i;
1965
  unsigned int size = graph->size;
1966

1967
  for (i = 0; i != size; ++i)
1968
    if (!TEST_BIT (ti->visited, i) && find (i) == i)
1969 1970 1971
      topo_visit (graph, ti, i);
}

1972 1973 1974 1975 1976
/* Structure used to for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
1977
  hashval_t hashcode;
1978 1979 1980
  unsigned int equivalence_class;
  bitmap labels;
} *equiv_class_label_t;
1981
typedef const struct equiv_class_label *const_equiv_class_label_t;
1982 1983 1984 1985 1986 1987 1988 1989 1990 1991 1992 1993 1994 1995

/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static htab_t pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static htab_t location_equiv_class_table;

/* Hash function for a equiv_class_label_t */

static hashval_t
equiv_class_label_hash (const void *p)
{
1996
  const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1997 1998 1999 2000 2001 2002 2003 2004
  return ecl->hashcode;
}

/* Equality function for two equiv_class_label_t's.  */

static int
equiv_class_label_eq (const void *p1, const void *p2)
{
2005 2006
  const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
  const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
2007 2008
  return (eql1->hashcode == eql2->hashcode
	  && bitmap_equal_p (eql1->labels, eql2->labels));
2009 2010 2011 2012 2013 2014 2015 2016 2017 2018 2019 2020 2021
}

/* Lookup a equivalence class in TABLE by the bitmap of LABELS it
   contains.  Return 0 (an invalid class id) if none is found.  */

static unsigned int
equiv_class_lookup (htab_t table, bitmap labels)
{
  void **slot;
  struct equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);

  slot = htab_find_slot_with_hash (table, &ecl,
				   ecl.hashcode, NO_INSERT);
  if (!slot)
    return 0;
  else
    return ((equiv_class_label_t) *slot)->equivalence_class;
}


/* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
   to TABLE.  LABELS must not already have an entry; the table takes
   ownership of the heap-allocated entry (freed by htab_delete).  */

static void
equiv_class_add (htab_t table, unsigned int equivalence_class,
		 bitmap labels)
{
  void **slot;
  equiv_class_label_t ecl = XNEW (struct equiv_class_label);

  ecl->labels = labels;
  ecl->equivalence_class = equivalence_class;
  ecl->hashcode = bitmap_hash (labels);

  slot = htab_find_slot_with_hash (table, ecl,
				   ecl->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = (void *) ecl;
}

/* Perform offline variable substitution.

   This is a worst case quadratic time way of identifying variables
   that must have equivalent points-to sets, including those caused by
   static cycles, and single entry subgraphs, in the constraint graph.

   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis. In the 14th International
   Static Analysis Symposium (SAS), August 2007."  It is known as the
   "HU" algorithm, and is equivalent to value numbering the collapsed
   constraint graph including evaluating unions.

   The general method of finding equivalence classes is as follows:
   Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
   Initialize all non-REF nodes to be direct nodes.
   For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
   variable}
   For each constraint containing the dereference, we also do the same
   thing.

   We then compute SCC's in the graph and unify nodes in the same SCC,
   including pts sets.

   For each non-collapsed node x:
    Visit all unvisited explicit incoming edges.
    Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
    where y->x.
    Lookup the equivalence class for pts(x).
     If we found one, equivalence_class(x) = found class.
     Otherwise, equivalence_class(x) = new class, and new_class is
    added to the lookup table.

   All direct nodes with the same equivalence class can be replaced
   with a single representative node.
   All unlabeled nodes (label == 0) are not pointers and all edges
   involving them can be eliminated.
   We perform these optimizations during rewrite_constraints.

   In addition to pointer equivalence class finding, we also perform
   location equivalence class finding.  This is the set of variables
   that always appear together in points-to sets.  We use this to
   compress the size of the points-to sets.  */

/* Current maximum pointer equivalence class id.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
2100 2101

/* Recursive routine to find strongly connected components in GRAPH,
2102
   and label it's nodes with DFS numbers.  */
2103 2104

static void
2105
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2106
{
2107 2108 2109
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;
2110

2111 2112 2113 2114
  gcc_assert (si->node_mapping[n] == n);
  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];
2115

2116 2117
  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2118
    {
2119
      unsigned int w = si->node_mapping[i];
2120

2121
      if (TEST_BIT (si->deleted, w))
2122 2123
	continue;

2124
      if (!TEST_BIT (si->visited, w))
2125
	condense_visit (graph, si, w);
2126 2127 2128
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
2129
	gcc_assert (nnode == n);
2130

2131 2132 2133 2134
	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }
2135

2136 2137 2138 2139 2140
  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

2141
      if (TEST_BIT (si->deleted, w))
2142 2143 2144
	continue;

      if (!TEST_BIT (si->visited, w))
2145
	condense_visit (graph, si, w);
2146 2147 2148 2149 2150 2151 2152 2153 2154
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }
2155

2156 2157 2158 2159 2160
  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      while (VEC_length (unsigned, si->scc_stack) != 0
	     && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
2161
	{
2162 2163 2164 2165 2166 2167
	  unsigned int w = VEC_pop (unsigned, si->scc_stack);
	  si->node_mapping[w] = n;

	  if (!TEST_BIT (graph->direct_nodes, w))
	    RESET_BIT (graph->direct_nodes, n);

2168 2169 2170 2171 2172 2173 2174 2175 2176 2177 2178 2179 2180 2181 2182 2183 2184 2185 2186 2187 2188
	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
2189
	}
2190
      SET_BIT (si->deleted, n);
2191 2192 2193 2194 2195
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}

2196 2197 2198 2199 2200 2201 2202 2203 2204 2205 2206 2207 2208 2209 2210 2211 2212 2213 2214 2215 2216
/* Label pointer equivalences.

   Depth-first visit N in the condensed graph, union the points-to
   sets of its incoming edges, and assign it a pointer equivalence
   class label (0 means "not a pointer").  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  SET_BIT (si->visited, n);

  if (!graph->points_to[n])
    graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);

  /* Label and union our incoming edges's points to sets.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!TEST_BIT (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
    }
  /* Indirect nodes get fresh variables.  */
  if (!TEST_BIT (graph->direct_nodes, n))
    bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
					       graph->points_to[n]);
      if (!label)
	{
	  label = pointer_equiv_class++;
	  equiv_class_add (pointer_equiv_class_table,
			   label, graph->points_to[n]);
	}
      graph->pointer_label[n] = label;
    }
}

2240 2241 2242 2243 2244 2245 2246 2247 2248 2249 2250
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the
   scc_info which the caller must later pass to
   free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
					   equiv_class_label_eq, free);
  location_equiv_class_table = htab_create (511, equiv_class_label_hash,
					    equiv_class_label_eq, free);
  /* Class id 0 is reserved to mean "non-pointer".  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  sbitmap_zero (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;
      unsigned int label;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      label = equiv_class_lookup (location_equiv_class_table,
				  pointed_by);
      if (label == 0)
	{
	  label = location_equiv_class++;
	  equiv_class_add (location_equiv_class_table,
			   label, pointed_by);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = label;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 0; i < FIRST_REF_NODE; i++)
      {
	bool direct_node = TEST_BIT (graph->direct_nodes, i);
	fprintf (dump_file,
		 "Equivalence classes for %s node id %d:%s are pointer: %d"
		 ", location:%d\n",
		 direct_node ? "Direct node" : "Indirect node", i,
		 get_varinfo (i)->name,
		 graph->pointer_label[si->node_mapping[i]],
		 graph->loc_label[si->node_mapping[i]]);
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}

/* Free information that was only necessary for variable
   substitution.  */
2348

2349 2350 2351 2352
static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
2353 2354 2355 2356
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
2357 2358
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
2359 2360
  htab_delete (pointer_equiv_class_table);
  htab_delete (location_equiv_class_table);
2361
  bitmap_obstack_release (&iteration_obstack);
2362 2363 2364 2365 2366 2367 2368 2369 2370 2371 2372 2373
}

/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.  */

static unsigned int
find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	{
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (graph->eq_rep[label], node))
	    unify_nodes (graph, graph->eq_rep[label], node, false);
	  return graph->eq_rep[label];
	}
      else
	{
	  graph->eq_rep[label] = node;
	  graph->pe_rep[label] = node;
	}
    }
  else
    {
      /* Address-taken: record pointer equivalence only; the actual
	 uniting happens later in unite_pointer_equivalences.  */
      gcc_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
    }

  return node;
}

2404 2405 2406 2407 2408 2409 2410 2411 2412 2413 2414
/* Unite pointer equivalent but not location equivalent nodes in
   GRAPH.  This may only be performed once variable substitution is
   finished.  */

static void
unite_pointer_equivalences (constraint_graph_t graph)
{
  unsigned int i;

  /* Go through the pointer equivalences and unite them to their
     representative, if they aren't already.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int label = graph->pe[i];
      if (label)
	{
	  int label_rep = graph->pe_rep[label];

	  /* No representative was recorded for this class.  */
	  if (label_rep == -1)
	    continue;

	  label_rep = find (label_rep);
	  if (label_rep >= 0 && unite (label_rep, find (i)))
	    unify_nodes (graph, label_rep, i, false);
	}
    }
}

/* Move complex constraints to the GRAPH nodes they belong to.  */
2433 2434

static void
2435 2436 2437 2438 2439
move_complex_constraints (constraint_graph_t graph)
{
  int i;
  constraint_t c;

2440
  FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
2441 2442 2443 2444 2445 2446 2447 2448 2449 2450 2451 2452 2453 2454 2455 2456 2457 2458 2459 2460 2461 2462 2463 2464 2465 2466 2467 2468 2469 2470 2471 2472
    {
      if (c)
	{
	  struct constraint_expr lhs = c->lhs;
	  struct constraint_expr rhs = c->rhs;

	  if (lhs.type == DEREF)
	    {
	      insert_into_complex (graph, lhs.var, c);
	    }
	  else if (rhs.type == DEREF)
	    {
	      if (!(get_varinfo (lhs.var)->is_special_var))
		insert_into_complex (graph, rhs.var, c);
	    }
	  else if (rhs.type != ADDRESSOF && lhs.var > anything_id
		   && (lhs.offset != 0 || rhs.offset != 0))
	    {
	      insert_into_complex (graph, rhs.var, c);
	    }
	}
    }
}


/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_variable_substitution.  Constraints involving
   non-pointer variables (label 0) are deleted (replaced by NULL in
   the constraints vector).  */

static void
rewrite_constraints (constraint_graph_t graph,
		     struct scc_info *si)
{
  int i;
  unsigned int j;
  constraint_t c;

  /* Everything must already be its own representative.  */
  for (j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);

  FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (lhs.var);
      unsigned int rhsvar = find (rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	    }
	  VEC_replace (constraint_t, constraints, i, NULL);
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	    }
	  VEC_replace (constraint_t, constraints, i, NULL);
	  continue;
	}

      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;

    }
}

/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      VEC(unsigned,heap) *queue = NULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it. */

      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		VEC_safe_push (unsigned, heap, queue, i);
	    }
	}

      /* Now that iteration is done, unify the queued members.  */
      for (queuepos = 0;
	   VEC_iterate (unsigned, queue, queuepos, i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      VEC_free (unsigned, heap, queue);
      return true;
    }
  return false;
}

/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changing.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed_count = 0;
  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 0; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || VEC_length (constraint_t, graph->complex[i]) > 0))
	{
	  SET_BIT (changed, i);
	  changed_count++;
	}
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (changed_count > 0)
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{

	  i = VEC_pop (unsigned, ti->topo_order);

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      VEC(constraint_t,heap) *complex = graph->complex[i];
	      bool solution_empty;

	      RESET_BIT (changed, i);
	      changed_count--;

	      /* Compute the changed set of solution bits.  */
	      bitmap_and_compl (pts, get_varinfo (i)->solution,
				get_varinfo (i)->oldsolution);

	      if (bitmap_empty_p (pts))
		continue;

	      bitmap_ior_into (get_varinfo (i)->oldsolution, pts);

	      solution = get_varinfo (i)->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      FOR_EACH_VEC_ELT (constraint_t, complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = set_union_with_increment (tmp, pts, 0);

		      if (flag)
			{
			  get_varinfo (to)->solution = tmp;
			  if (!TEST_BIT (changed, to))
			    {
			      SET_BIT (changed, to);
			      changed_count++;
			    }
			}
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  sbitmap_free (changed);
  bitmap_obstack_release (&oldpta_obstack);
}

2725
/* Map from trees to variable infos.  */
static struct pointer_map_t *vi_for_tree;
2727 2728


2729
/* Insert ID as the variable id for tree T in the vi_for_tree map.  */
2730

2731
static void
2732
insert_vi_for_tree (tree t, varinfo_t vi)
2733
{
2734 2735
  void **slot = pointer_map_insert (vi_for_tree, t);
  gcc_assert (vi);
2736
  gcc_assert (*slot == NULL);
2737
  *slot = vi;
2738 2739
}

2740
/* Find the variable info for tree T in VI_FOR_TREE.  If T does not
2741
   exist in the map, return NULL, otherwise, return the varinfo we found.  */
2742

2743 2744
static varinfo_t
lookup_vi_for_tree (tree t)
2745
{
2746 2747 2748
  void **slot = pointer_map_contains (vi_for_tree, t);
  if (slot == NULL)
    return NULL;
2749

2750
  return (varinfo_t) *slot;
2751 2752 2753 2754 2755 2756 2757
}

/* Return a printable name for DECL.

   Uses the assembler name or source name when available; otherwise,
   when dumping is enabled, synthesizes a name ("<name>_<version>" for
   SSA names, "D.<uid>" for other decls) in GC memory.  Falls back to
   the literal "NULL".  */

static const char *
alias_get_name (tree decl)
{
  const char *res;
  char *temp;
  int num_printed = 0;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  else
    res = get_name (decl);
  if (res != NULL)
    return res;

  res = "NULL";
  /* Only spend effort constructing a name when it will be dumped.  */
  if (!dump_file)
    return res;

  if (TREE_CODE (decl) == SSA_NAME)
    {
      num_printed = asprintf (&temp, "%s_%u",
			      alias_get_name (SSA_NAME_VAR (decl)),
			      SSA_NAME_VERSION (decl));
    }
  else if (DECL_P (decl))
    {
      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
    }
  if (num_printed > 0)
    {
      res = ggc_strdup (temp);
      free (temp);
    }
  return res;
}

2791 2792
/* Find the variable id for tree T in the map.
   If T doesn't exist in the map, create an entry for it and return it.  */
2793

2794 2795
static varinfo_t
get_vi_for_tree (tree t)
2796
{
2797 2798
  void **slot = pointer_map_contains (vi_for_tree, t);
  if (slot == NULL)
2799
    return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2800

2801
  return (varinfo_t) *slot;
2802 2803
}

2804
/* Get a scalar constraint expression for a new temporary variable.  */
2805 2806

static struct constraint_expr
2807
new_scalar_tmp_constraint_exp (const char *name)
2808
{
2809 2810
  struct constraint_expr tmp;
  varinfo_t vi;
2811

2812
  vi = new_var_info (NULL_TREE, name);
2813 2814 2815 2816
  vi->offset = 0;
  vi->size = -1;
  vi->fullsize = -1;
  vi->is_full_var = 1;
2817

2818 2819 2820
  tmp.var = vi->id;
  tmp.type = SCALAR;
  tmp.offset = 0;
2821

2822
  return tmp;
2823 2824 2825 2826 2827 2828 2829 2830 2831 2832 2833 2834
}

/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will be taken its address of.  */

static void
get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (SSA_VAR_P (t) || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
      && SSA_NAME_IS_DEFAULT_DEF (t))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      for (; vi; vi = vi->next)
	{
	  cexpr.var = vi->id;
	  VEC_safe_push (ce_s, heap, *results, &cexpr);
	}
      return;
    }

  VEC_safe_push (ce_s, heap, *results, &cexpr);
}

2877 2878
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  */
2879 2880

static void
2881
process_constraint (constraint_t t)
2882 2883 2884
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;
2885

2886 2887 2888
  gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
  gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));

2889 2890 2891 2892 2893 2894 2895 2896 2897
  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);
2898

2899 2900 2901 2902 2903 2904 2905 2906 2907 2908
  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

2909
  /* This can happen in our IR with things like n->a = *p */
2910
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2911 2912
    {
      /* Split into tmp = *rhs, *lhs = tmp */
2913 2914
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2915 2916
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
2917 2918 2919 2920
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
2921 2922
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2923 2924
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
2925 2926 2927
    }
  else
    {
2928
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2929
      VEC_safe_push (constraint_t, heap, constraints, t);
2930 2931 2932 2933 2934 2935 2936
    }
}


/* Return the position, in bits, of FIELD_DECL from the beginning of its
   structure.  */

2937
static HOST_WIDE_INT
2938 2939 2940
bitpos_of_field (const tree fdecl)
{

2941 2942
  if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
      || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2943
    return -1;
2944

2945 2946
  return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
	  + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2947 2948 2949
}


2950 2951 2952 2953 2954 2955 2956
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       VEC (ce_s, heap) **results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsunitoffset, rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || !host_integerp (offset, 0))
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Make sure the bit-offset also fits.  */
      rhsunitoffset = TREE_INT_CST_LOW (offset);
      rhsoffset = rhsunitoffset * BITS_PER_UNIT;
      if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	rhsoffset = UNKNOWN_OFFSET;
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     VEC_iterate here.  */
  n = VEC_length (ce_s, *results);
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = *VEC_index (ce_s, *results, j);
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	c.offset = 0;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  varinfo_t temp = lookup_vi_for_tree (curr->decl);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		VEC_safe_push (ce_s, heap, *results, &c2);
	      temp = temp->next;
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* Search the sub-field which overlaps with the
	     pointed-to offset.  If the result is outside of the variable
	     we have to provide a conservative result, as the variable is
	     still reachable from the resulting pointer (even though it
	     technically cannot point to anything).  The last and first
	     sub-fields are such conservative results.
	     ???  If we always had a sub-field for &object + 1 then
	     we could represent this in a more precise way.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;
	  temp = first_or_preceding_vi_for_offset (curr, offset);

	  /* If the found variable is not exactly at the pointed to
	     result, we have to include the next variable in the
	     solution as well.  Otherwise two increments by offset / 2
	     do not result in the same or a conservative superset
	     solution.  */
	  if (temp->offset != offset
	      && temp->next != NULL)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->next->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &c2);
	    }
	  c.var = temp->id;
	  c.offset = 0;
	}
      else
	c.offset = rhsoffset;

      VEC_replace (ce_s, *results, j, &c);
    }
}


3062
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3063 3064 3065
   If address_p is true the result will be taken its address of.
   If lhs_p is true then the constraint expression is assumed to be used
   as the lhs.  */
3066

3067
static void
3068
get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
3069
				  bool address_p, bool lhs_p)
3070
{
3071
  tree orig_t = t;
3072
  HOST_WIDE_INT bitsize = -1;
3073
  HOST_WIDE_INT bitmaxsize = -1;
3074 3075
  HOST_WIDE_INT bitpos;
  tree forzero;
3076
  struct constraint_expr *result;
3077 3078 3079 3080

  /* Some people like to do cute things like take the address of
     &0->a.b */
  forzero = t;
3081
  while (handled_component_p (forzero)
3082 3083
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
3084
    forzero = TREE_OPERAND (forzero, 0);
3085

3086
  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3087
    {
3088
      struct constraint_expr temp;
3089

3090 3091 3092 3093 3094
      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
3095
    }
3096

3097 3098 3099 3100 3101 3102 3103 3104 3105 3106 3107 3108 3109 3110 3111 3112 3113 3114 3115 3116 3117 3118 3119
  /* Handle type-punning through unions.  If we are extracting a pointer
     from a union via a possibly type-punning access that pointer
     points to anything, similar to a conversion of an integer to
     a pointer.  */
  if (!lhs_p)
    {
      tree u;
      for (u = t;
	   TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
	   u = TREE_OPERAND (u, 0))
	if (TREE_CODE (u) == COMPONENT_REF
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
	  {
	    struct constraint_expr temp;

	    temp.offset = 0;
	    temp.var = anything_id;
	    temp.type = ADDRESSOF;
	    VEC_safe_push (ce_s, heap, *results, &temp);
	    return;
	  }
    }

3120
  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3121

3122 3123
  /* Pretend to take the address of the base, we'll take care of
     adding the required subset of sub-fields below.  */
3124
  get_constraint_for_1 (t, results, true, lhs_p);
3125
  gcc_assert (VEC_length (ce_s, *results) == 1);
3126
  result = VEC_last (ce_s, *results);
3127

3128 3129 3130 3131 3132
  if (result->type == SCALAR
      && get_varinfo (result->var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result->offset = 0;
  else if (result->type == SCALAR)
3133 3134 3135 3136 3137
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint. When we handle pointer subtraction,
	 we may have to do something cute here.  */
3138

3139
      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
3140
	  && bitmaxsize != 0)
3141 3142 3143 3144
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
3145
	     walk to find it.  */
3146
	  struct constraint_expr cexpr = *result;
3147
	  varinfo_t curr;
3148 3149 3150
	  VEC_pop (ce_s, *results);
	  cexpr.offset = 0;
	  for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3151
	    {
3152
	      if (ranges_overlap_p (curr->offset, curr->size,
3153
				    bitpos, bitmaxsize))
3154
		{
3155 3156 3157 3158
		  cexpr.var = curr->id;
		  VEC_safe_push (ce_s, heap, *results, &cexpr);
		  if (address_p)
		    break;
3159 3160
		}
	    }
3161 3162 3163 3164 3165 3166 3167 3168 3169 3170 3171 3172
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p
	      && VEC_length (ce_s, *results) == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != NULL)
		curr = curr->next;
	      cexpr.var = curr->id;
	      VEC_safe_push (ce_s, heap, *results, &cexpr);
	    }
3173
	  else if (VEC_length (ce_s, *results) == 0)
3174 3175 3176 3177
	    /* Assert that we found *some* field there. The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
3178 3179 3180 3181 3182 3183 3184 3185
	    /* Or accessing only padding via type-punning to a type
	       that has a filed just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &cexpr);
	    }
3186
	}
3187 3188 3189 3190 3191 3192
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
		     "ignoring\n");
	}
3193 3194 3195 3196
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
3197
  else if (result->type == DEREF)
3198
    {
3199 3200 3201 3202 3203
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfiled of any variable.  */
      if (bitpos == -1
	  || bitsize != bitmaxsize
3204 3205
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result->offset == UNKNOWN_OFFSET)
3206 3207
	result->offset = UNKNOWN_OFFSET;
      else
3208
	result->offset += bitpos;
3209
    }
3210 3211 3212 3213 3214 3215 3216 3217
  else if (result->type == ADDRESSOF)
    {
      /* We can end up here for component references on a
         VIEW_CONVERT_EXPR <>(&foobar).  */
      result->type = SCALAR;
      result->var = anything_id;
      result->offset = 0;
    }
3218
  else
3219
    gcc_unreachable ();
3220 3221 3222 3223 3224 3225 3226 3227 3228
}


/* Dereference the constraint expression CONS, and return the result.
   DEREF (ADDRESSOF) = SCALAR
   DEREF (SCALAR) = DEREF
   DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
   This is needed so that we can handle dereferencing DEREF constraints.  */

3229 3230
static void
do_deref (VEC (ce_s, heap) **constraints)
3231
{
3232 3233
  struct constraint_expr *c;
  unsigned int i = 0;
3234

3235
  FOR_EACH_VEC_ELT (ce_s, *constraints, i, c)
3236
    {
3237 3238 3239 3240 3241 3242
      if (c->type == SCALAR)
	c->type = DEREF;
      else if (c->type == ADDRESSOF)
	c->type = SCALAR;
      else if (c->type == DEREF)
	{
3243 3244
	  struct constraint_expr tmplhs;
	  tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3245 3246 3247 3248 3249
	  process_constraint (new_constraint (tmplhs, *c));
	  c->var = tmplhs.var;
	}
      else
	gcc_unreachable ();
3250 3251 3252
    }
}

3253 3254 3255 3256 3257 3258 3259 3260 3261
/* Given a tree T, return the constraint expression for taking the
   address of it.  */

static void
get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
{
  struct constraint_expr *c;
  unsigned int i;

  get_constraint_for_1 (t, results, true, true);

  /* Taking an address turns a dereference back into a plain access
     and a plain access into an address.  */
  FOR_EACH_VEC_ELT (ce_s, *results, i, c)
    {
      if (c->type == DEREF)
	c->type = SCALAR;
      else
	c->type = ADDRESSOF;
    }
}

3273 3274
/* Given a tree T, return the constraint expression for it.  */

3275
static void
3276 3277
get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
		      bool lhs_p)
3278 3279 3280 3281 3282 3283 3284 3285 3286 3287
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case. The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
3288 3289 3290 3291 3292 3293
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
3294 3295 3296 3297 3298 3299 3300 3301 3302 3303 3304
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
3305
	temp.var = nonlocal_id;
3306 3307
      temp.type = ADDRESSOF;
      temp.offset = 0;
3308 3309
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
3310 3311
    }

3312 3313 3314 3315 3316 3317 3318 3319 3320 3321
  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

3322 3323 3324 3325 3326 3327 3328
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
3329 3330
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
3331
	  default:;
3332
	  }
3333
	break;
3334 3335 3336 3337 3338
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
3339
	  case MEM_REF:
3340
	    {
3341
	      struct constraint_expr cs;
3342
	      varinfo_t vi, curr;
3343 3344
	      tree off = double_int_to_tree (sizetype, mem_ref_offset (t));
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0), off, results);
3345
	      do_deref (results);
3346 3347 3348

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
3349
	      cs = *VEC_last (ce_s, *results);
3350
	      if (address_p
3351
		  || cs.type != SCALAR)
3352 3353
		return;

3354
	      vi = get_varinfo (cs.var);
3355 3356 3357 3358 3359 3360 3361 3362 3363 3364 3365 3366 3367 3368 3369 3370 3371 3372 3373 3374
	      curr = vi->next;
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
		    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = curr->next)
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  VEC_safe_push (ce_s, heap, *results, &cs);
			}
		      else
			break;
		    }
		}
3375
	      return;
3376 3377
	    }
	  case ARRAY_REF:
3378
	  case ARRAY_RANGE_REF:
3379
	  case COMPONENT_REF:
3380
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
3381
	    return;
3382
	  case VIEW_CONVERT_EXPR:
3383 3384
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
3385 3386
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
3387
	  default:;
3388
	  }
3389
	break;
3390 3391 3392 3393 3394 3395
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
3396
	    {
3397
	      get_constraint_for_ssa_var (t, results, address_p);
3398 3399
	      return;
	    }
3400 3401 3402 3403 3404 3405 3406 3407 3408
	  case CONSTRUCTOR:
	    {
	      unsigned int i;
	      tree val;
	      VEC (ce_s, heap) *tmp = NULL;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
3409
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3410
		  FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
3411 3412 3413 3414 3415 3416 3417 3418 3419
		    VEC_safe_push (ce_s, heap, *results, rhsp);
		  VEC_truncate (ce_s, tmp, 0);
		}
	      VEC_free (ce_s, heap, tmp);
	      /* We do not know whether the constructor was complete,
	         so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
3420
	  default:;
3421
	  }
3422
	break;
3423 3424
      }
    case tcc_declaration:
3425
      {
3426
	get_constraint_for_ssa_var (t, results, address_p);
3427 3428
	return;
      }
3429 3430 3431 3432 3433 3434 3435 3436 3437
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */ 
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	VEC_safe_push (ce_s, heap, *results, &temp);
	return;
      }
3438
    default:;
3439
    }
3440 3441 3442 3443 3444 3445

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  VEC_safe_push (ce_s, heap, *results, &temp);
3446 3447
}

3448 3449 3450 3451 3452 3453 3454
/* Given a gimple tree T, return the constraint expression vector for it.  */

static void
get_constraint_for (tree t, VEC (ce_s, heap) **results)
{
  gcc_assert (VEC_length (ce_s, *results) == 0);

  /* lhs_p is true: constraints are for a lhs use of T.  */
  get_constraint_for_1 (t, results, false, true);
}

/* Given a gimple tree T, return the constraint expression vector for it
   to be used as the rhs of a constraint.  */

static void
get_constraint_for_rhs (tree t, VEC (ce_s, heap) **results)
{
  gcc_assert (VEC_length (ce_s, *results) == 0);

  /* lhs_p is false: rhs uses may see union type-punning handling.  */
  get_constraint_for_1 (t, results, false, false);
}
3468

3469 3470 3471 3472 3473 3474 3475 3476 3477 3478 3479 3480 3481

/* Efficiently generates constraints from all entries in *RHSC to all
   entries in *LHSC.  */

static void
process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
{
  struct constraint_expr *lhsp, *rhsp;
  unsigned i, j;

  if (VEC_length (ce_s, lhsc) <= 1
      || VEC_length (ce_s, rhsc) <= 1)
    {
      /* Small cross product: emit the constraints directly.  */
      FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
	FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
	  process_constraint (new_constraint (*lhsp, *rhsp));
    }
  else
    {
      /* Avoid a quadratic number of constraints by funneling
	 everything through a single temporary.  */
      struct constraint_expr tmp;
      tmp = new_scalar_tmp_constraint_exp ("allalltmp");
      FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
	process_constraint (new_constraint (tmp, *rhsp));
      FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
	process_constraint (new_constraint (*lhsp, tmp));
    }
}

3497 3498 3499 3500 3501 3502
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = VEC_index (ce_s, lhsc, 0);
  rhsp = VEC_index (ce_s, rhsc, 0);
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      /* Indirect copy: we cannot match fields pairwise, so copy
	 everything to everything with unknown offsets.  */
      if (lhsp->type == DEREF)
	{
	  gcc_assert (VEC_length (ce_s, lhsc) == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (VEC_length (ce_s, rhsc) == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      /* Direct copy: walk both field lists in lockstep and emit
	 constraints for overlapping fields only.  */
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = VEC_index (ce_s, rhsc, k);
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance whichever side's field ends first.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= VEC_length (ce_s, rhsc))
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();

  VEC_free (ce_s, heap, lhsc);
  VEC_free (ce_s, heap, rhsc);
}

3568
/* Create constraints ID = { rhsc }.  */
3569 3570

static void
3571
make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc)
3572 3573 3574 3575 3576 3577 3578 3579 3580
{
  struct constraint_expr *c;
  struct constraint_expr includes;
  unsigned int j;

  includes.var = id;
  includes.offset = 0;
  includes.type = SCALAR;

3581
  FOR_EACH_VEC_ELT (ce_s, rhsc, j, c)
3582
    process_constraint (new_constraint (includes, *c));
3583 3584 3585 3586 3587 3588 3589 3590 3591 3592
}

/* Create a constraint ID = OP.  */

static void
make_constraint_to (unsigned id, tree op)
{
  VEC(ce_s, heap) *rhsc = NULL;
  get_constraint_for_rhs (op, &rhsc);
  make_constraints_to (id, rhsc);
  VEC_free (ce_s, heap, rhsc);
}

3596 3597 3598 3599 3600 3601 3602 3603 3604 3605 3606 3607 3608 3609 3610 3611 3612 3613 3614 3615 3616 3617 3618 3619 3620 3621 3622 3623 3624 3625 3626 3627 3628 3629
/* Create a constraint ID = &FROM.  */

static void
make_constraint_from (varinfo_t vi, int from)
{
  struct constraint_expr lhs, rhs;

  /* The destination is accessed directly ...  */
  lhs.type = SCALAR;
  lhs.var = vi->id;
  lhs.offset = 0;

  /* ... and receives the address of FROM.  */
  rhs.type = ADDRESSOF;
  rhs.var = from;
  rhs.offset = 0;

  process_constraint (new_constraint (lhs, rhs));
}

/* Create a constraint ID = FROM.  */

static void
make_copy_constraint (varinfo_t vi, int from)
{
  struct constraint_expr lhs, rhs;

  /* A plain copy: both sides are direct (SCALAR) accesses.  */
  lhs.type = SCALAR;
  lhs.var = vi->id;
  lhs.offset = 0;

  rhs.type = SCALAR;
  rhs.var = from;
  rhs.offset = 0;

  process_constraint (new_constraint (lhs, rhs));
}

3630 3631 3632 3633 3634 3635 3636 3637
/* Make constraints necessary to make OP escape.  */

static void
make_escape_constraint (tree op)
{
  /* Escaping is modeled by flowing OP into the ESCAPED solution.  */
  make_constraint_to (escaped_id, op);
}

3638 3639 3640 3641 3642 3643 3644 3645 3646 3647 3648 3649 3650 3651 3652 3653 3654 3655 3656 3657 3658 3659 3660 3661 3662 3663
/* Add constraints to that the solution of VI is transitively closed.  */

static void
make_transitive_closure_constraints (varinfo_t vi)
{
  struct constraint_expr lhs, rhs;

  /* Close the solution under dereference: VAR = *VAR.  */
  lhs.type = SCALAR;
  lhs.var = vi->id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = vi->id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Close the solution under field offsets: VAR = VAR + UNKNOWN.
     The lhs is unchanged from above.  */
  rhs.type = SCALAR;
  rhs.var = vi->id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));
}

3664 3665
/* Create a new artificial heap variable with NAME.
   Return the created variable.  */
3666 3667

static varinfo_t
3668
make_heapvar_for (varinfo_t lhs, const char *name)
3669 3670
{
  varinfo_t vi;
3671
  tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
3672 3673 3674 3675 3676 3677 3678

  if (heapvar == NULL_TREE)
    {
      var_ann_t ann;
      heapvar = create_tmp_var_raw (ptr_type_node, name);
      DECL_EXTERNAL (heapvar) = 1;

3679
      heapvar_insert (lhs->decl, lhs->offset, heapvar);
3680 3681 3682 3683 3684 3685 3686

      ann = get_var_ann (heapvar);
      ann->is_heapvar = 1;
    }

  /* For global vars we need to add a heapvar to the list of referenced
     vars of a different function than it was created for originally.  */
3687
  if (cfun && gimple_referenced_vars (cfun))
3688 3689 3690 3691 3692 3693
    add_referenced_var (heapvar);

  vi = new_var_info (heapvar, name);
  vi->is_artificial_var = true;
  vi->is_heap_var = true;
  vi->is_unknown_size_var = true;
3694
  vi->offset = 0;
3695 3696 3697 3698 3699
  vi->fullsize = ~0;
  vi->size = ~0;
  vi->is_full_var = true;
  insert_vi_for_tree (heapvar, vi);

3700 3701 3702 3703 3704 3705 3706 3707 3708 3709
  return vi;
}

/* Create a new artificial heap variable with NAME and make a
   constraint from it to LHS.  Return the created variable.  */

static varinfo_t
make_constraint_from_heapvar (varinfo_t lhs, const char *name)
{
  varinfo_t vi = make_heapvar_for (lhs, name);
  make_constraint_from (lhs, vi->id);

  return vi;
}

/* Create a new artificial heap variable with NAME and make a
   constraint from it to LHS.  Set flags according to a tag used
   for tracking restrict pointers.  */

static void
make_constraint_from_restrict (varinfo_t lhs, const char *name)
{
  varinfo_t vi = make_constraint_from_heapvar (lhs, name);

  /* Restrict tags are artificial, local and pointer-free.  */
  vi->is_restrict_var = 1;
  vi->is_global_var = 0;
  vi->is_special_var = 1;
  vi->may_have_pointers = 0;
}

3730 3731 3732 3733 3734 3735 3736 3737 3738 3739 3740 3741 3742 3743 3744 3745 3746 3747 3748 3749 3750 3751 3752 3753 3754 3755 3756 3757 3758
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };

/* Get a constraint for the requested part of a function designator FI
   when operating in IPA mode.  */

static struct constraint_expr
get_function_part_constraint (varinfo_t fi, unsigned part)
{
  struct constraint_expr c;

  gcc_assert (in_ipa_mode);

  if (fi->id == anything_id)
    {
      /* ???  We probably should have a ANYFN special variable.  */
      c.var = anything_id;
      c.offset = 0;
      c.type = SCALAR;
    }
  else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
    {
      /* A known function: look up the sub-variable for PART.  */
      varinfo_t ai = first_vi_for_offset (fi, part);
      if (ai)
	c.var = ai->id;
      else
	c.var = anything_id;
      c.offset = 0;
      c.type = SCALAR;
    }
  else
    {
      /* An unknown function designator; defer to solving time via
	 a dereference at offset PART.  */
      c.var = fi->id;
      c.offset = part;
      c.type = DEREF;
    }

  return c;
}

3776 3777 3778 3779
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS.  */

static void
3780
handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
3781
{
3782
  struct constraint_expr rhsc;
3783
  unsigned i;
3784
  bool returns_uses = false;
3785

3786 3787 3788
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
3789
      int flags = gimple_call_arg_flags (stmt, i);
3790

3791 3792
      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
3793 3794 3795 3796 3797 3798 3799 3800 3801 3802 3803 3804 3805 3806 3807 3808 3809 3810 3811 3812 3813 3814 3815 3816 3817 3818 3819 3820 3821 3822 3823
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
         any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.
	 ???  We might get away with less (and more precise) constraints
	 if using a temporary for transitively closing things.  */
      if ((flags & EAF_NOCLOBBER)
	   && (flags & EAF_NOESCAPE))
	{
	  varinfo_t uses = get_call_use_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (uses);
	  make_constraint_to (uses->id, arg);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    {
	      make_transitive_closure_constraints (uses);
	      make_transitive_closure_constraints (clobbers);
	    }
	  make_constraint_to (uses->id, arg);
	  make_constraint_to (clobbers->id, arg);
	  returns_uses = true;
	}
      else
3824 3825
	make_escape_constraint (arg);
    }
3826

3827 3828 3829 3830 3831 3832 3833 3834 3835 3836
  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      VEC_safe_push (ce_s, heap, *results, &rhsc);
    }

3837
  /* The static chain escapes as well.  */
3838 3839
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));
3840

3841 3842 3843
  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
3844
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3845 3846 3847 3848 3849 3850 3851
    {
      VEC(ce_s, heap) *tmpc = NULL;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
3852
      FOR_EACH_VEC_ELT (ce_s, tmpc, i, c)
3853 3854 3855 3856
	process_constraint (new_constraint (lhsc, *c));
      VEC_free(ce_s, heap, tmpc);
    }

3857 3858
  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
3859
  rhsc.offset = 0;
3860
  rhsc.type = SCALAR;
3861
  VEC_safe_push (ce_s, heap, *results, &rhsc);
3862
}
3863

3864 3865
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
3866
   the LHS point to global and escaped variables.  */
3867 3868

static void
3869 3870
handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
		 tree fndecl)
3871 3872 3873
{
  VEC(ce_s, heap) *lhsc = NULL;

3874
  get_constraint_for (lhs, &lhsc);
3875 3876 3877 3878 3879 3880 3881 3882 3883 3884 3885 3886 3887
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      VEC_safe_push (ce_s, heap, lhsc, &tmpc);
    }
3888

3889 3890 3891 3892 3893 3894 3895 3896 3897 3898 3899 3900 3901 3902
  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  flags = gimple_call_return_flags (stmt);
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc = NULL;
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      VEC_free (ce_s, heap, rhsc);
    }
  else if (flags & ERF_NOALIAS)
3903 3904
    {
      varinfo_t vi;
3905 3906 3907
      struct constraint_expr tmpc;
      rhsc = NULL;
      vi = make_heapvar_for (get_vi_for_tree (lhs), "HEAP");
3908 3909
      /* We delay marking allocated storage global until we know if
         it escapes.  */
3910
      DECL_EXTERNAL (vi->decl) = 0;
3911
      vi->is_global_var = 0;
3912
      /* If this is not a real malloc call assume the memory was
3913
	 initialized and thus may point to global memory.  All
3914 3915 3916 3917
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
3918 3919 3920 3921
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      VEC_safe_push (ce_s, heap, rhsc, &tmpc);
3922
    }
3923 3924 3925

  process_all_all_constraints (lhsc, rhsc);

3926 3927 3928 3929 3930 3931 3932
  VEC_free (ce_s, heap, lhsc);
}

/* For non-IPA mode, generate constraints necessary for a call of a
   const function that returns a pointer in the statement STMT.  */

static void
3933
handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
3934
{
3935
  struct constraint_expr rhsc;
3936
  unsigned int k;
3937

3938 3939
  /* Treat nested const functions the same as pure functions as far
     as the static chain is concerned.  */
3940
  if (gimple_call_chain (stmt))
3941
    {
3942 3943 3944 3945
      varinfo_t uses = get_call_use_vi (stmt);
      make_transitive_closure_constraints (uses);
      make_constraint_to (uses->id, gimple_call_chain (stmt));
      rhsc.var = uses->id;
3946
      rhsc.offset = 0;
3947 3948
      rhsc.type = SCALAR;
      VEC_safe_push (ce_s, heap, *results, &rhsc);
3949 3950 3951
    }

  /* May return arguments.  */
3952 3953 3954
  for (k = 0; k < gimple_call_num_args (stmt); ++k)
    {
      tree arg = gimple_call_arg (stmt, k);
3955 3956 3957 3958 3959 3960 3961
      VEC(ce_s, heap) *argc = NULL;
      unsigned i;
      struct constraint_expr *argp;
      get_constraint_for_rhs (arg, &argc);
      FOR_EACH_VEC_ELT (ce_s, argc, i, argp)
	VEC_safe_push (ce_s, heap, *results, argp);
      VEC_free(ce_s, heap, argc);
3962
    }
3963

3964 3965 3966 3967 3968
  /* May return addresses of globals.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = ADDRESSOF;
  VEC_safe_push (ce_s, heap, *results, &rhsc);
3969 3970
}

/* For non-IPA mode, generate constraints necessary for a call to a
   pure function in statement STMT.  */

static void
3975
handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
3976
{
3977
  struct constraint_expr rhsc;
3978
  unsigned i;
3979
  varinfo_t uses = NULL;
3980 3981

  /* Memory reached from pointer arguments is call-used.  */
3982 3983 3984
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
3985
      if (!uses)
3986
	{
3987 3988
	  uses = get_call_use_vi (stmt);
	  make_transitive_closure_constraints (uses);
3989
	}
3990
      make_constraint_to (uses->id, arg);
3991
    }
3992 3993

  /* The static chain is used as well.  */
3994
  if (gimple_call_chain (stmt))
3995
    {
3996 3997 3998 3999 4000 4001
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, gimple_call_chain (stmt));
4002
    }
4003

4004 4005
  /* Pure functions may return call-used and nonlocal memory.  */
  if (uses)
4006
    {
4007
      rhsc.var = uses->id;
4008
      rhsc.offset = 0;
4009 4010
      rhsc.type = SCALAR;
      VEC_safe_push (ce_s, heap, *results, &rhsc);
4011
    }
4012
  rhsc.var = nonlocal_id;
4013
  rhsc.offset = 0;
4014
  rhsc.type = SCALAR;
4015
  VEC_safe_push (ce_s, heap, *results, &rhsc);
4016 4017
}


/* Return the varinfo for the callee of CALL.  */

static varinfo_t
get_fi_for_callee (gimple call)
{
  tree decl;

  /* If we can directly resolve the function being called, do so.
     Otherwise, it must be some sort of indirect expression that
     we should still be able to handle.  */
  decl = gimple_call_fndecl (call);
  if (decl)
    return get_vi_for_tree (decl);

  decl = gimple_call_fn (call);
  /* The function can be either an SSA name pointer or,
     worse, an OBJ_TYPE_REF.  In this case we have no
     clue and should be getting ANYFN (well, ANYTHING for now).  */
  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* For a default-def parameter or result use the underlying
	 decl as the callee representative.  */
      if ((TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
	   || TREE_CODE (SSA_NAME_VAR (decl)) == RESULT_DECL)
	  && SSA_NAME_IS_DEFAULT_DEF (decl))
	decl = SSA_NAME_VAR (decl);
      return get_vi_for_tree (decl);
    }
  else if (TREE_CODE (decl) == INTEGER_CST
	   || TREE_CODE (decl) == OBJ_TYPE_REF)
    return get_varinfo (anything_id);
  else
    gcc_unreachable ();
}

4053 4054 4055 4056
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  AI points to auxiliary alias information used
   when building alias sets and computing alias grouping heuristics.  */
4057 4058

static void
4059
find_func_aliases (gimple origt)
4060
{
4061
  gimple t = origt;
4062 4063 4064
  VEC(ce_s, heap) *lhsc = NULL;
  VEC(ce_s, heap) *rhsc = NULL;
  struct constraint_expr *c;
4065
  varinfo_t fi;
4066

4067
  /* Now build constraints expressions.  */
4068
  if (gimple_code (t) == GIMPLE_PHI)
4069
    {
4070 4071
      size_t i;
      unsigned int j;
4072

4073 4074 4075 4076
      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (i = 0; i < gimple_phi_num_args (t); i++)
4077
	{
4078
	  tree strippedrhs = PHI_ARG_DEF (t, i);
4079

4080 4081
	  STRIP_NOPS (strippedrhs);
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4082

4083 4084 4085 4086
	  FOR_EACH_VEC_ELT (ce_s, lhsc, j, c)
	    {
	      struct constraint_expr *c2;
	      while (VEC_length (ce_s, rhsc) > 0)
4087
		{
4088 4089 4090
		  c2 = VEC_last (ce_s, rhsc);
		  process_constraint (new_constraint (*c, *c2));
		  VEC_pop (ce_s, rhsc);
4091
		}
4092
	    }
4093 4094 4095
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
4096 4097 4098
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.
4099 4100 4101

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
4102
  else if (is_gimple_call (t))
4103
    {
4104 4105
      tree fndecl = gimple_call_fndecl (t);
      if (fndecl != NULL_TREE
4106 4107 4108 4109 4110 4111 4112 4113 4114 4115 4116 4117 4118 4119 4120 4121 4122 4123 4124 4125 4126 4127
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	/* ???  All builtins that are handled here need to be handled
	   in the alias-oracle query functions explicitly!  */
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  /* All the following functions return a pointer to the same object
	     as their first argument points to.  The functions do not add
	     to the ESCAPED solution.  The functions make the first argument
	     pointed to memory point to what the second argument pointed to
	     memory points to.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	    {
	      tree res = gimple_call_lhs (t);
4128 4129 4130 4131
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					      == BUILT_IN_BCOPY ? 0 : 1));
4132 4133 4134 4135 4136 4137 4138 4139 4140 4141 4142 4143 4144 4145 4146 4147 4148 4149 4150 4151 4152 4153 4154 4155 4156 4157 4158 4159 4160 4161 4162 4163 4164 4165 4166 4167 4168 4169 4170
	      if (res != NULL_TREE)
		{
		  get_constraint_for (res, &lhsc);
		  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
		    get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
		  else
		    get_constraint_for (dest, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);
		  VEC_free (ce_s, heap, lhsc);
		  VEC_free (ce_s, heap, rhsc);
		}
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      do_deref (&lhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      VEC_free (ce_s, heap, lhsc);
	      VEC_free (ce_s, heap, rhsc);
	      return;
	    }
	  case BUILT_IN_MEMSET:
	    {
	      tree res = gimple_call_lhs (t);
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      struct constraint_expr ac;
	      if (res != NULL_TREE)
		{
		  get_constraint_for (res, &lhsc);
		  get_constraint_for (dest, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);
		  VEC_free (ce_s, heap, lhsc);
		  VEC_free (ce_s, heap, rhsc);
		}
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      do_deref (&lhsc);
4171 4172 4173 4174 4175 4176 4177 4178 4179 4180 4181
	      if (flag_delete_null_pointer_checks
		  && integer_zerop (gimple_call_arg (t, 1)))
		{
		  ac.type = ADDRESSOF;
		  ac.var = nothing_id;
		}
	      else
		{
		  ac.type = SCALAR;
		  ac.var = integer_id;
		}
4182
	      ac.offset = 0;
4183
	      FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4184 4185 4186 4187 4188 4189 4190 4191 4192 4193 4194 4195 4196 4197 4198 4199 4200 4201 4202 4203 4204 4205 4206 4207 4208 4209 4210
		process_constraint (new_constraint (*lhsp, ac));
	      VEC_free (ce_s, heap, lhsc);
	      return;
	    }
	  /* All the following functions do not return pointers, do not
	     modify the points-to sets of memory reachable from their
	     arguments and do not add to the ESCAPED solution.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	  case BUILT_IN_FREE:
	    return;
4211 4212 4213 4214 4215 4216 4217 4218 4219 4220 4221 4222 4223 4224 4225 4226 4227 4228
	  /* Trampolines are special - they set up passing the static
	     frame.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	    {
	      tree tramp = gimple_call_arg (t, 0);
	      tree nfunc = gimple_call_arg (t, 1);
	      tree frame = gimple_call_arg (t, 2);
	      unsigned i;
	      struct constraint_expr lhs, *rhsp;
	      if (in_ipa_mode)
		{
		  varinfo_t nfi = NULL;
		  gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
		  nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
		  if (nfi)
		    {
		      lhs = get_function_part_constraint (nfi, fi_static_chain);
		      get_constraint_for (frame, &rhsc);
4229
		      FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4230 4231 4232 4233 4234 4235 4236 4237 4238 4239 4240 4241 4242 4243 4244 4245 4246 4247 4248 4249 4250 4251 4252 4253 4254 4255 4256 4257 4258 4259 4260 4261 4262 4263 4264 4265 4266 4267 4268 4269 4270 4271 4272 4273 4274 4275 4276 4277 4278 4279 4280
			process_constraint (new_constraint (lhs, *rhsp));
		      VEC_free (ce_s, heap, rhsc);

		      /* Make the frame point to the function for
			 the trampoline adjustment call.  */
		      get_constraint_for (tramp, &lhsc);
		      do_deref (&lhsc);
		      get_constraint_for (nfunc, &rhsc);
		      process_all_all_constraints (lhsc, rhsc);
		      VEC_free (ce_s, heap, rhsc);
		      VEC_free (ce_s, heap, lhsc);

		      return;
		    }
		}
	      /* Else fallthru to generic handling which will let
	         the frame escape.  */
	      break;
	    }
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    {
	      tree tramp = gimple_call_arg (t, 0);
	      tree res = gimple_call_lhs (t);
	      if (in_ipa_mode && res)
		{
		  get_constraint_for (res, &lhsc);
		  get_constraint_for (tramp, &rhsc);
		  do_deref (&rhsc);
		  process_all_all_constraints (lhsc, rhsc);
		  VEC_free (ce_s, heap, rhsc);
		  VEC_free (ce_s, heap, lhsc);
		}
	      return;
	    }
	  /* Variadic argument handling needs to be handled in IPA
	     mode as well.  */
	  case BUILT_IN_VA_START:
	    {
	      if (in_ipa_mode)
		{
		  tree valist = gimple_call_arg (t, 0);
		  struct constraint_expr rhs, *lhsp;
		  unsigned i;
		  /* The va_list gets access to pointers in variadic
		     arguments.  */
		  fi = lookup_vi_for_tree (cfun->decl);
		  gcc_assert (fi != NULL);
		  get_constraint_for (valist, &lhsc);
		  do_deref (&lhsc);
		  rhs = get_function_part_constraint (fi, ~0);
		  rhs.type = ADDRESSOF;
4281
		  FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4282 4283 4284 4285 4286 4287 4288 4289 4290 4291 4292
		    process_constraint (new_constraint (*lhsp, rhs));
		  VEC_free (ce_s, heap, lhsc);
		  /* va_list is clobbered.  */
		  make_constraint_to (get_call_clobber_vi (t)->id, valist);
		  return;
		}
	      break;
	    }
	  /* va_end doesn't have any effect that matters.  */
	  case BUILT_IN_VA_END:
	    return;
4293 4294 4295 4296 4297 4298 4299 4300 4301 4302 4303 4304 4305 4306 4307 4308 4309 4310 4311
	  /* Alternate return.  Simply give up for now.  */
	  case BUILT_IN_RETURN:
	    {
	      fi = NULL;
	      if (!in_ipa_mode
		  || !(fi = get_vi_for_tree (cfun->decl)))
		make_constraint_from (get_varinfo (escaped_id), anything_id);
	      else if (in_ipa_mode
		       && fi != NULL)
		{
		  struct constraint_expr lhs, rhs;
		  lhs = get_function_part_constraint (fi, fi_result);
		  rhs.var = anything_id;
		  rhs.offset = 0;
		  rhs.type = SCALAR;
		  process_constraint (new_constraint (lhs, rhs));
		}
	      return;
	    }
4312 4313 4314 4315 4316 4317
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later excercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }
4318 4319
      if (!in_ipa_mode
	  || (fndecl
4320 4321
	      && (!(fi = lookup_vi_for_tree (fndecl))
		  || !fi->is_fn_info)))
4322
	{
4323
	  VEC(ce_s, heap) *rhsc = NULL;
4324 4325
	  int flags = gimple_call_flags (t);

4326 4327
	  /* Const functions can return their arguments and addresses
	     of global memory but not of escaped memory.  */
4328
	  if (flags & (ECF_CONST|ECF_NOVOPS))
4329
	    {
4330
	      if (gimple_call_lhs (t))
4331
		handle_const_call (t, &rhsc);
4332
	    }
4333 4334 4335
	  /* Pure functions can return addresses in and of memory
	     reachable from their arguments, but they are not an escape
	     point for reachable memory of their arguments.  */
4336 4337
	  else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	    handle_pure_call (t, &rhsc);
4338
	  else
4339
	    handle_rhs_call (t, &rhsc);
4340
	  if (gimple_call_lhs (t))
4341
	    handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4342
	  VEC_free (ce_s, heap, rhsc);
4343 4344 4345
	}
      else
	{
4346
	  tree lhsop;
4347
	  unsigned j;
4348

4349
	  fi = get_fi_for_callee (t);
4350

4351 4352
	  /* Assign all the passed arguments to the appropriate incoming
	     parameters of the function.  */
4353
	  for (j = 0; j < gimple_call_num_args (t); j++)
4354
	    {
4355 4356
	      struct constraint_expr lhs ;
	      struct constraint_expr *rhsp;
4357
	      tree arg = gimple_call_arg (t, j);
4358

4359
	      get_constraint_for_rhs (arg, &rhsc);
4360
	      lhs = get_function_part_constraint (fi, fi_parm_base + j);
4361 4362 4363 4364 4365 4366
	      while (VEC_length (ce_s, rhsc) != 0)
		{
		  rhsp = VEC_last (ce_s, rhsc);
		  process_constraint (new_constraint (lhs, *rhsp));
		  VEC_pop (ce_s, rhsc);
		}
4367
	    }
4368 4369

	  /* If we are returning a value, assign it to the result.  */
4370
	  lhsop = gimple_call_lhs (t);
4371
	  if (lhsop)
4372
	    {
4373 4374 4375 4376
	      struct constraint_expr rhs;
	      struct constraint_expr *lhsp;

	      get_constraint_for (lhsop, &lhsc);
4377 4378 4379 4380
	      rhs = get_function_part_constraint (fi, fi_result);
	      if (fndecl
		  && DECL_RESULT (fndecl)
		  && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4381
		{
4382 4383 4384 4385 4386
		  VEC(ce_s, heap) *tem = NULL;
		  VEC_safe_push (ce_s, heap, tem, &rhs);
		  do_deref (&tem);
		  rhs = *VEC_index (ce_s, tem, 0);
		  VEC_free(ce_s, heap, tem);
4387
		}
4388
	      FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
4389
		process_constraint (new_constraint (*lhsp, rhs));
4390
	    }
4391 4392 4393 4394 4395 4396 4397 4398 4399 4400 4401 4402

	  /* If we pass the result decl by reference, honor that.  */
	  if (lhsop
	      && fndecl
	      && DECL_RESULT (fndecl)
	      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for_address_of (lhsop, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_result);
4403
	      FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
4404 4405 4406 4407 4408 4409 4410 4411 4412 4413 4414 4415
		process_constraint (new_constraint (lhs, *rhsp));
	      VEC_free (ce_s, heap, rhsc);
	    }

	  /* If we use a static chain, pass it along.  */
	  if (gimple_call_chain (t))
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for (gimple_call_chain (t), &rhsc);
	      lhs = get_function_part_constraint (fi, fi_static_chain);
4416
	      FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
4417 4418
		process_constraint (new_constraint (lhs, *rhsp));
	    }
4419
	}
4420
    }
4421 4422 4423
  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
4424
  else if (is_gimple_assign (t))
4425
    {
4426 4427 4428
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4429

4430
      if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4431
	do_structure_copy (lhsop, rhsop);
4432 4433
      else
	{
4434
	  get_constraint_for (lhsop, &lhsc);
4435 4436 4437 4438

	  if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
4439 4440 4441 4442 4443 4444 4445 4446
	  else if (gimple_assign_rhs_code (t) == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
4447
	  else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
4448 4449 4450
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
4451
	    get_constraint_for_rhs (rhsop, &rhsc);
4452 4453
	  else
	    {
4454 4455 4456 4457 4458 4459 4460 4461 4462 4463 4464 4465 4466
	      /* All other operations are merges.  */
	      VEC (ce_s, heap) *tmp = NULL;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
		    VEC_safe_push (ce_s, heap, rhsc, rhsp);
		  VEC_truncate (ce_s, tmp, 0);
		}
	      VEC_free (ce_s, heap, tmp);
4467
	    }
4468
	  process_all_all_constraints (lhsc, rhsc);
4469
	}
4470 4471 4472
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop)
4473 4474 4475
	  && is_global_var (lhsop)
	  && (!in_ipa_mode
	      || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4476
	make_escape_constraint (rhsop);
4477 4478 4479 4480 4481 4482 4483 4484 4485
      /* If this is a conversion of a non-restrict pointer to a
	 restrict pointer track it with a new heapvar.  */
      else if (gimple_assign_cast_p (t)
	       && POINTER_TYPE_P (TREE_TYPE (rhsop))
	       && POINTER_TYPE_P (TREE_TYPE (lhsop))
	       && !TYPE_RESTRICT (TREE_TYPE (rhsop))
	       && TYPE_RESTRICT (TREE_TYPE (lhsop)))
	make_constraint_from_restrict (get_vi_for_tree (lhsop),
				       "CAST_RESTRICT");
4486
    }
4487 4488
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
4489
	   && gimple_return_retval (t) != NULL_TREE)
4490
    {
4491 4492 4493 4494 4495 4496 4497 4498 4499 4500 4501 4502
      fi = NULL;
      if (!in_ipa_mode
	  || !(fi = get_vi_for_tree (cfun->decl)))
	make_escape_constraint (gimple_return_retval (t));
      else if (in_ipa_mode
	       && fi != NULL)
	{
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
4503
	  get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
4504
	  FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4505 4506
	    process_constraint (new_constraint (lhs, *rhsp));
	}
4507
    }
4508 4509
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gimple_code (t) == GIMPLE_ASM)
4510
    {
4511 4512 4513 4514 4515 4516 4517 4518 4519
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (t);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
4520
	{
4521 4522 4523 4524 4525 4526 4527 4528 4529 4530 4531 4532 4533 4534
	  tree link = gimple_asm_output_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
4535
	  if (op)
4536 4537 4538 4539 4540 4541 4542 4543
	    {
	      VEC(ce_s, heap) *lhsc = NULL;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
4544
	      FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
4545 4546 4547
		process_constraint (new_constraint (*lhsp, rhsc));
	      VEC_free (ce_s, heap, lhsc);
	    }
4548
	}
4549
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
4550
	{
4551 4552 4553 4554 4555 4556 4557 4558 4559 4560 4561 4562
	  tree link = gimple_asm_input_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
4563 4564
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
4565
	  else if (op)
4566 4567 4568 4569
	    make_escape_constraint (op);
	}
    }

4570 4571
  VEC_free (ce_s, heap, rhsc);
  VEC_free (ce_s, heap, lhsc);
4572 4573 4574
}


/* Create a constraint adding to the clobber set of FI the memory
   pointed to by PTR.  */

static void
process_ipa_clobber (varinfo_t fi, tree ptr)
{
  VEC(ce_s, heap) *ptrc = NULL;
  struct constraint_expr *c, lhs;
  unsigned i;
  get_constraint_for_rhs (ptr, &ptrc);
  lhs = get_function_part_constraint (fi, fi_clobbers);
  FOR_EACH_VEC_ELT (ce_s, ptrc, i, c)
    process_constraint (new_constraint (lhs, *c));
  VEC_free (ce_s, heap, ptrc);
}

/* Walk statement T setting up clobber and use constraints according to the
   references found in T.  This function is a main part of the
   IPA constraint builder.  */

static void
find_func_clobbers (gimple origt)
{
  gimple t = origt;
  VEC(ce_s, heap) *lhsc = NULL;
  VEC(ce_s, heap) *rhsc = NULL;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (cfun->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  VEC_free (ce_s, heap, rhsc);
	}
    }

  /* Account for uses in assigments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (t) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
      tree tem = rhs;
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  VEC_free (ce_s, heap, rhsc);
	}
    }

  if (is_gimple_call (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (decl
	  && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	    {
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      VEC_free (ce_s, heap, lhsc);
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      VEC_free (ce_s, heap, rhsc);
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      VEC_free (ce_s, heap, lhsc);
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  VEC_free (ce_s, heap, rhsc);
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (t);
      if (cfi->id == anything_id)
	{
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (gimple_call_fndecl (t)
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ???  This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ???  Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }

  VEC_free (ce_s, heap, rhsc);
}


/* Find the first varinfo in the same variable as START that overlaps with
4861
   OFFSET.  Return NULL if we can't find one.  */
4862

4863
static varinfo_t
4864 4865
first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
{
4866 4867 4868 4869 4870 4871 4872 4873 4874 4875
  /* If the offset is outside of the variable, bail out.  */
  if (offset >= start->fullsize)
    return NULL;

  /* If we cannot reach offset from start, lookup the first field
     and start from there.  */
  if (start->offset > offset)
    start = lookup_vi_for_tree (start->decl);

  while (start)
4876 4877 4878 4879 4880
    {
      /* We may not find a variable in the field list with the actual
	 offset when when we have glommed a structure to a variable.
	 In that case, however, offset should still be within the size
	 of the variable. */
4881
      if (offset >= start->offset
4882
	  && (offset - start->offset) < start->size)
4883 4884 4885
	return start;

      start= start->next;
4886
    }
4887

4888
  return NULL;
4889 4890
}

4891 4892 4893 4894 4895 4896 4897 4898 4899 4900 4901 4902 4903 4904 4905 4906 4907 4908 4909 4910 4911
/* Find the first varinfo in the same variable as START that overlaps with
   OFFSET.  If there is no such varinfo the varinfo directly preceding
   OFFSET is returned.  */

static varinfo_t
first_or_preceding_vi_for_offset (varinfo_t start,
				  unsigned HOST_WIDE_INT offset)
{
  /* If we cannot reach offset from start, lookup the first field
     and start from there.  */
  if (start->offset > offset)
    start = lookup_vi_for_tree (start->decl);

  /* We may not find a variable in the field list with the actual
     offset when when we have glommed a structure to a variable.
     In that case, however, offset should still be within the size
     of the variable.
     If we got beyond the offset we look for return the field
     directly preceding offset which may be the last field.  */
  while (start->next
	 && offset >= start->offset
4912
	 && !((offset - start->offset) < start->size))
4913 4914 4915 4916 4917
    start = start->next;

  return start;
}

4918

4919 4920 4921 4922 4923 4924 4925
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
4926 4927
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;
4928 4929

  /* Size, in bits, of the field.  */
4930
  unsigned HOST_WIDE_INT size;
4931

4932
  unsigned has_unknown_size : 1;
4933

4934 4935
  unsigned must_have_pointers : 1;

4936
  unsigned may_have_pointers : 1;
4937 4938

  unsigned only_restrict_pointers : 1;
4939 4940 4941 4942 4943 4944
};
typedef struct fieldoff fieldoff_s;

DEF_VEC_O(fieldoff_s);
DEF_VEC_ALLOC_O(fieldoff_s,heap);

4945 4946
/* qsort comparison function for two fieldoff's PA and PB */

4947
static int
4948 4949 4950 4951
fieldoff_compare (const void *pa, const void *pb)
{
  const fieldoff_s *foa = (const fieldoff_s *)pa;
  const fieldoff_s *fob = (const fieldoff_s *)pb;
4952
  unsigned HOST_WIDE_INT foasize, fobsize;
4953

4954 4955 4956 4957
  if (foa->offset < fob->offset)
    return -1;
  else if (foa->offset > fob->offset)
    return 1;
4958

4959 4960
  foasize = foa->size;
  fobsize = fob->size;
4961
  if (foasize < fobsize)
4962
    return -1;
4963 4964 4965
  else if (foasize > fobsize)
    return 1;
  return 0;
4966 4967 4968
}

/* Sort a fieldstack according to the field offset and sizes.  */
4969
static void
4970
sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4971
{
4972
  VEC_qsort (fieldoff_s, fieldstack, fieldoff_compare);
4973 4974
}

4975 4976 4977 4978 4979 4980 4981 4982 4983 4984 4985 4986
/* Return true if V is a tree that we can have subvars for.
   Normally, this is any aggregate type.  Also complex
   types which are not gimple registers can have subvars.  */

static inline bool
var_can_have_subvars (const_tree v)
{
  /* Volatile variables should never have subvars.  */
  if (TREE_THIS_VOLATILE (v))
    return false;

  /* Non decls or memory tags can never have subvars.  */
4987
  if (!DECL_P (v))
4988 4989 4990 4991 4992 4993 4994 4995 4996
    return false;

  /* Aggregates without overlapping fields can have subvars.  */
  if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
    return true;

  return false;
}

4997 4998 4999 5000 5001 5002 5003 5004 5005 5006 5007 5008 5009 5010 5011 5012 5013 5014 5015 5016 5017 5018 5019 5020 5021 5022
/* Return true if T is a type that does contain pointers.  */

static bool
type_must_have_pointers (tree type)
{
  if (POINTER_TYPE_P (type))
    return true;

  if (TREE_CODE (type) == ARRAY_TYPE)
    return type_must_have_pointers (TREE_TYPE (type));

  /* A function or method can have pointers as arguments, so track
     those separately.  */
  if (TREE_CODE (type) == FUNCTION_TYPE
      || TREE_CODE (type) == METHOD_TYPE)
    return true;

  return false;
}

/* Return true if the type of field T must be tracked as containing
   pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5023 5024 5025 5026 5027 5028
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
5029 5030
   Returns false if the caller is supposed to handle the field we
   recursed for.  */
5031

5032
static bool
5033
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
5034
			     HOST_WIDE_INT offset)
5035 5036
{
  tree field;
5037
  bool empty_p = true;
5038 5039

  if (TREE_CODE (type) != RECORD_TYPE)
5040
    return false;
5041 5042 5043 5044

  /* If the vector of fields is growing too big, bail out early.
     Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
5045
  if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5046
    return false;
5047

5048
  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5049 5050 5051
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
5052
	HOST_WIDE_INT foff = bitpos_of_field (field);
5053

5054 5055 5056
	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5057
	  push = true;
5058
	else if (!push_fields_onto_fieldstack
5059
		    (TREE_TYPE (field), fieldstack, offset + foff)
5060 5061 5062 5063 5064 5065 5066 5067
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
5068
	  {
5069 5070
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
5071
	    bool must_have_pointers_p;
5072 5073 5074 5075 5076 5077 5078 5079 5080

	    if (!VEC_empty (fieldoff_s, *fieldstack))
	      pair = VEC_last (fieldoff_s, *fieldstack);

	    if (!DECL_SIZE (field)
		|| !host_integerp (DECL_SIZE (field), 1))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
5081
	    must_have_pointers_p = field_must_have_pointers (field);
5082 5083
	    if (pair
		&& !has_unknown_size
5084
		&& !must_have_pointers_p
5085 5086 5087
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5088 5089 5090 5091 5092 5093 5094 5095 5096 5097 5098 5099
	      {
		pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
	      }
	    else
	      {
		pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
		pair->offset = offset + foff;
		pair->has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
		else
		  pair->size = -1;
5100 5101
		pair->must_have_pointers = must_have_pointers_p;
		pair->may_have_pointers = true;
5102 5103 5104 5105
		pair->only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
5106
	      }
5107
	  }
5108 5109

	empty_p = false;
5110
      }
5111

5112
  return !empty_p;
5113 5114
}

5115 5116 5117 5118 5119 5120
/* Count the number of arguments DECL has, and set IS_VARARGS to true
   if it is a varargs function.  */

static unsigned int
count_num_arguments (tree decl, bool *is_varargs)
{
5121
  unsigned int num = 0;
5122 5123
  tree t;

5124 5125
  /* Capture named arguments for K&R functions.  They do not
     have a prototype and thus no TYPE_ARG_TYPES.  */
5126
  for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5127
    ++num;
5128

5129 5130 5131 5132
  /* Check if the function has variadic arguments.  */
  for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
    if (TREE_VALUE (t) == void_type_node)
      break;
5133 5134
  if (!t)
    *is_varargs = true;
5135 5136

  return num;
5137 5138 5139 5140 5141
}

/* Creation function node for DECL, using NAME, and return the index
   of the variable we've created for the function.  */

5142
static varinfo_t
5143 5144
create_function_info_for (tree decl, const char *name)
{
5145 5146
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
5147
  tree arg;
5148 5149
  unsigned int i;
  bool is_varargs = false;
5150
  unsigned int num_args = count_num_arguments (decl, &is_varargs);
5151 5152 5153

  /* Create the variable info.  */

5154
  vi = new_var_info (decl, name);
5155 5156
  vi->offset = 0;
  vi->size = 1;
5157 5158 5159 5160 5161
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    vi->fullsize = ~0;
5162
  insert_vi_for_tree (vi->decl, vi);
5163

5164 5165 5166 5167
  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
5168
    {
5169 5170 5171 5172 5173 5174 5175 5176 5177 5178 5179 5180 5181 5182 5183 5184 5185 5186 5187 5188 5189 5190 5191 5192 5193 5194 5195 5196 5197 5198 5199
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi;
      prev_vi = clobbervi;

      asprintf (&tempname, "%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi;
      prev_vi = usevi;
5200 5201
    }

5202 5203 5204 5205 5206 5207 5208 5209 5210 5211 5212 5213 5214 5215 5216 5217 5218 5219 5220 5221 5222 5223 5224 5225 5226 5227 5228 5229 5230 5231 5232 5233 5234 5235 5236 5237 5238 5239 5240 5241 5242 5243 5244 5245 5246
  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;
      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi;
      prev_vi = chainvi;
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
5247
	resultvi->may_have_pointers = true;
5248 5249 5250 5251 5252 5253
      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi;
      prev_vi = resultvi;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }
5254

5255
  /* Set up variables for each argument.  */
5256 5257
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
5258
    {
5259 5260 5261 5262 5263 5264 5265
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;
5266

5267
      asprintf (&tempname, "%s.arg%d", name, i);
5268 5269 5270
      newname = ggc_strdup (tempname);
      free (tempname);

5271
      argvi = new_var_info (argdecl, newname);
5272
      argvi->offset = fi_parm_base + i;
5273
      argvi->size = 1;
5274
      argvi->is_full_var = true;
5275
      argvi->fullsize = vi->fullsize;
5276
      if (arg)
5277
	argvi->may_have_pointers = true;
5278 5279 5280
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
5281 5282
      if (arg)
	{
5283
	  insert_vi_for_tree (arg, argvi);
5284
	  arg = DECL_CHAIN (arg);
5285 5286
	}
    }
5287

5288 5289
  /* Add one representative for all further args.  */
  if (is_varargs)
5290
    {
5291
      varinfo_t argvi;
5292 5293
      const char *newname;
      char *tempname;
5294
      tree decl;
5295

5296
      asprintf (&tempname, "%s.varargs", name);
5297 5298 5299
      newname = ggc_strdup (tempname);
      free (tempname);

5300 5301 5302 5303 5304 5305 5306 5307 5308 5309 5310 5311 5312
      /* We need sth that can be pointed to for va_start.  */
      decl = create_tmp_var_raw (ptr_type_node, name);
      get_var_ann (decl);

      argvi = new_var_info (decl, newname);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
5313
    }
5314

5315
  return vi;
5316
}
5317

5318

5319
/* Return true if FIELDSTACK contains fields that overlap.
5320 5321 5322 5323 5324 5325 5326
   FIELDSTACK is assumed to be sorted by offset.  */

static bool
check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
{
  fieldoff_s *fo = NULL;
  unsigned int i;
5327
  HOST_WIDE_INT lastoffset = -1;
5328

5329
  FOR_EACH_VEC_ELT (fieldoff_s, fieldstack, i, fo)
5330 5331 5332 5333 5334 5335 5336
    {
      if (fo->offset == lastoffset)
	return true;
      lastoffset = fo->offset;
    }
  return false;
}
5337

5338 5339 5340 5341
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  */

5342 5343
static varinfo_t
create_variable_info_for_1 (tree decl, const char *name)
5344
{
5345
  varinfo_t vi, newvi;
5346 5347
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5348
  VEC (fieldoff_s,heap) *fieldstack = NULL;
5349 5350
  fieldoff_s *fo;
  unsigned int i;
5351

5352
  if (!declsize
5353
      || !host_integerp (declsize, 1))
5354
    {
5355 5356
      vi = new_var_info (decl, name);
      vi->offset = 0;
5357
      vi->size = ~0;
5358 5359 5360
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
5361
      vi->may_have_pointers = true;
5362
      return vi;
5363
    }
5364 5365 5366 5367 5368 5369 5370 5371 5372

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ???  Force us to not use subfields for global initializers
	 in IPA mode.  Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)
	   && DECL_INITIAL (decl)))
5373
    {
5374 5375 5376 5377
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

5378
      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5379 5380 5381 5382 5383 5384 5385 5386 5387 5388 5389 5390 5391 5392 5393 5394 5395 5396 5397 5398 5399 5400 5401 5402 5403 5404 5405 5406 5407 5408 5409 5410 5411 5412

      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	VEC_free (fieldoff_s, heap, fieldstack);
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (VEC_length (fieldoff_s, fieldstack) <= 1
      || VEC_length (fieldoff_s, fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
5413
      vi->may_have_pointers = true;
5414
      vi->fullsize = TREE_INT_CST_LOW (declsize);
5415
      vi->size = vi->fullsize;
5416 5417 5418
      vi->is_full_var = true;
      VEC_free (fieldoff_s, heap, fieldstack);
      return vi;
5419
    }
5420

5421 5422 5423 5424 5425 5426 5427 5428 5429 5430 5431 5432 5433 5434 5435 5436 5437 5438 5439 5440 5441 5442 5443 5444 5445 5446 5447
  vi = new_var_info (decl, name);
  vi->fullsize = TREE_INT_CST_LOW (declsize);
  for (i = 0, newvi = vi;
       VEC_iterate (fieldoff_s, fieldstack, i, fo);
       ++i, newvi = newvi->next)
    {
      const char *newname = "NULL";
      char *tempname;

      if (dump_file)
	{
	  asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
		    "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
	  newname = ggc_strdup (tempname);
	  free (tempname);
	}
      newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      if (i + 1 < VEC_length (fieldoff_s, fieldstack))
	newvi->next = new_var_info (decl, name);
    }

  VEC_free (fieldoff_s, heap, fieldstack);
5448

5449 5450 5451 5452 5453 5454 5455 5456 5457 5458 5459 5460 5461
  return vi;
}

/* Create varinfo(s) for DECL with NAME, register them in the
   tree-to-varinfo map and add initial constraints for globals.
   Returns the id of the first created varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi->next)
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");

      /* For escaped variables initialize them from nonlocal.  */
      if (!in_ipa_mode
	  || DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* If this is a global variable with an initializer and we are in
	 IPA mode generate constraints for it.  In non-IPA mode
	 the initializer from nonlocal is all we need.  */
      if (in_ipa_mode
	  && DECL_INITIAL (decl))
	{
	  VEC (ce_s, heap) *rhsc = NULL;
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;

	  get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	  lhs.var = vi->id;
	  lhs.offset = 0;
	  lhs.type = SCALAR;
	  FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  /* If this is a variable that escapes from the unit
	     the initializer escapes as well.  */
	  if (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
	    {
	      lhs.var = escaped_id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	    }
	  VEC_free (ce_s, heap, rhsc);
	}
    }

  return id;
}

/* Print out the points-to solution for VAR to FILE.  */

5512
static void
5513 5514 5515 5516
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int i;
5517 5518
  bitmap_iterator bi;

5519 5520 5521 5522 5523 5524 5525 5526 5527 5528 5529 5530 5531
  /* Dump the solution for unified vars anyway, this avoids difficulties
     in scanning dumps in the testsuite.  */
  fprintf (file, "%s = { ", vi->name);
  vi = get_varinfo (find (var));
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    fprintf (file, "%s ", get_varinfo (i)->name);
  fprintf (file, "}");

  /* But note when the variable was unified.  */
  if (vi->id != var)
    fprintf (file, " same as %s", vi->name);

  fprintf (file, "\n");
5532 5533 5534 5535
}

/* Print the points-to solution for VAR to stdout.  */

5536
DEBUG_FUNCTION void
5537 5538 5539 5540 5541 5542 5543 5544 5545 5546 5547 5548
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}

/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;
5549

5550
  /* For each incoming pointer argument arg, create the constraint ARG
5551 5552
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
5553
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
5554 5555
    {
      varinfo_t p;
5556

5557 5558 5559 5560 5561 5562 5563 5564 5565 5566 5567 5568 5569 5570 5571 5572 5573 5574 5575 5576 5577 5578 5579 5580 5581 5582 5583 5584 5585 5586 5587 5588
      /* For restrict qualified pointers to objects passed by
         reference build a real representative for the pointed-to object.  */
      if (DECL_BY_REFERENCE (t)
	  && POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  tree heapvar = heapvar_lookup (t, 0);
	  if (heapvar == NULL_TREE)
	    {
	      var_ann_t ann;
	      heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
					    "PARM_NOALIAS");
	      DECL_EXTERNAL (heapvar) = 1;
	      heapvar_insert (t, 0, heapvar);
	      ann = get_var_ann (heapvar);
	      ann->is_heapvar = 1;
	    }
	  if (gimple_referenced_vars (cfun))
	    add_referenced_var (heapvar);
	  lhsc.var = get_vi_for_tree (t)->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = (vi = get_vi_for_tree (heapvar))->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  vi->is_restrict_var = 1;
	  continue;
	}

5589
      for (p = get_vi_for_tree (t); p; p = p->next)
5590 5591 5592 5593 5594 5595
	{
	  if (p->may_have_pointers)
	    make_constraint_from (p, nonlocal_id);
	  if (p->only_restrict_pointers)
	    make_constraint_from_restrict (p, "PARM_RESTRICT");
	}
5596 5597 5598
      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
5599
    }
5600

5601 5602 5603 5604 5605 5606 5607
  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = p->next)
5608
	make_constraint_from (p, nonlocal_id);
5609 5610
    }

5611 5612 5613 5614 5615 5616 5617 5618
  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }
5619 5620
}

5621 5622 5623 5624 5625 5626 5627 5628
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;
  hashval_t hashcode;
} *shared_bitmap_info_t;
5629
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5630 5631 5632 5633 5634 5635 5636 5637

static htab_t shared_bitmap_table;

/* Hash function for a shared_bitmap_info_t */

static hashval_t
shared_bitmap_hash (const void *p)
{
5638
  const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5639 5640 5641 5642 5643 5644 5645 5646
  return bi->hashcode;
}

/* Equality function for two shared_bitmap_info_t's. */

static int
shared_bitmap_eq (const void *p1, const void *p2)
{
5647 5648
  const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
  const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5649 5650 5651 5652 5653 5654 5655 5656 5657 5658 5659 5660 5661 5662
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}

/* Lookup a bitmap in the shared bitmap hashtable, and return an already
   existing instance if there is one, NULL otherwise.  */

static bitmap
shared_bitmap_lookup (bitmap pt_vars)
{
  void **slot;
  struct shared_bitmap_info key;

  key.pt_vars = pt_vars;
  key.hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, &key,
				   key.hashcode, NO_INSERT);
  if (!slot)
    return NULL;

  return ((shared_bitmap_info_t) *slot)->pt_vars;
}


/* Add a bitmap to the shared bitmap hashtable.  The bitmap must not
   already be present.  */

static void
shared_bitmap_add (bitmap pt_vars)
{
  void **slot;
  shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);

  sbi->pt_vars = pt_vars;
  sbi->hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
				   sbi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = (void *) sbi;
}


5691
/* Set bits in INTO corresponding to the variable uids in solution set FROM.  */
5692

H.J. Lu committed
5693
static void
5694
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5695 5696 5697
{
  unsigned int i;
  bitmap_iterator bi;
5698

5699 5700 5701
  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);
5702

5703 5704 5705 5706
      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;
5707

5708 5709 5710
      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
5711
	{
5712 5713 5714 5715 5716 5717
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

5718 5719
	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
5720
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
5721
	  if (vi->is_global_var)
5722
	    pt->vars_contains_global = true;
5723
	}
5724 5725
    }
}
5726 5727


5728
/* Compute the points-to solution *PT for the variable VI.  */
5729 5730

static void
5731
find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
5732
{
5733
  unsigned int i;
5734 5735 5736
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
5737
  varinfo_t vi;
5738 5739 5740 5741 5742

  memset (pt, 0, sizeof (struct pt_solution));

  /* This variable may have been collapsed, let's get the real
     variable.  */
5743
  vi = get_varinfo (find (orig_vi->id));
5744 5745 5746 5747 5748 5749 5750 5751 5752 5753 5754 5755

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
5756 5757 5758 5759 5760 5761
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
5762 5763 5764 5765 5766
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
5767 5768 5769
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
5770 5771 5772 5773
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
5774 5775
      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;
5776 5777 5778 5779
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
5780
  if (pt->anything
5781
      && (orig_vi->is_artificial_var
5782
	  || !pt->vars_contains_restrict))
5783
    return;
5784 5785 5786 5787 5788

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

5789
  set_uids_in_ptset (finished_solution, vi->solution, pt);
5790 5791 5792 5793 5794 5795 5796 5797 5798 5799 5800 5801 5802
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      pt->vars = result;
      bitmap_clear (finished_solution);
    }
}

5803
/* Given a pointer variable P, fill in its points-to set.  */
5804 5805

static void
5806
find_what_p_points_to (tree p)
5807 5808
{
  struct ptr_info_def *pi;
5809
  tree lookup_p = p;
5810
  varinfo_t vi;
5811

5812 5813
  /* For parameters, get at the points-to set for the actual parm
     decl.  */
5814
  if (TREE_CODE (p) == SSA_NAME
5815 5816
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)
Diego Novillo committed
5817
      && SSA_NAME_IS_DEFAULT_DEF (p))
5818 5819
    lookup_p = SSA_NAME_VAR (p);

5820
  vi = lookup_vi_for_tree (lookup_p);
5821 5822 5823 5824
  if (!vi)
    return;

  pi = get_ptr_info (p);
5825
  find_what_var_points_to (vi, &pi->pt);
5826
}
5827

5828

5829
/* Query statistics for points-to solutions.  */

static struct {
  /* Number of pt_solution_includes queries answered "may alias"
     resp. disambiguated as "no alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* Likewise for pt_solutions_intersect queries.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
5837

5838 5839 5840 5841 5842 5843 5844 5845 5846 5847 5848 5849 5850 5851 5852 5853 5854
void
dump_pta_stats (FILE *s)
{
  fprintf (s, "\nPTA query stats:\n");
  fprintf (s, "  pt_solution_includes: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solution_includes_no_alias,
	   pta_stats.pt_solution_includes_no_alias
	   + pta_stats.pt_solution_includes_may_alias);
  fprintf (s, "  pt_solutions_intersect: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solutions_intersect_no_alias,
	   pta_stats.pt_solutions_intersect_no_alias
	   + pta_stats.pt_solutions_intersect_may_alias);
}
5855

5856

5857 5858
/* Reset the points-to solution *PT to a conservative default
   (point to anything).  */
5859

5860 5861 5862 5863 5864 5865
void
pt_solution_reset (struct pt_solution *pt)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
}
5866

5867
/* Set the points-to solution *PT to point only to the variables
5868 5869 5870
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that contains
   global variables and VARS_CONTAINS_RESTRICT specifies whether
   it contains restrict tag variables.  */
5871 5872

void
5873 5874
pt_solution_set (struct pt_solution *pt, bitmap vars,
		 bool vars_contains_global, bool vars_contains_restrict)
5875 5876 5877
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
5878 5879 5880 5881
  pt->vars_contains_global = vars_contains_global;
  pt->vars_contains_restrict = vars_contains_restrict;
}

5882 5883 5884 5885 5886 5887 5888
/* Set the points-to solution *PT to point only to the variable VAR.  */

void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (*pt));
  pt->vars_contains_global = is_global_var (var);
  /* A fresh singleton bitmap holding just VAR's points-to UID.  */
  pt->vars = BITMAP_GGC_ALLOC ();
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
}

5893 5894 5895 5896 5897 5898 5899 5900 5901 5902 5903
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  /* If either side points to anything the union does too; collapse
     DEST to the conservative default and be done.  */
  if (dest->anything || src->anything)
    {
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_global |= src->vars_contains_global;
  dest->vars_contains_restrict |= src->vars_contains_restrict;

  /* Merge the variable bitmaps, allocating one for DEST on demand.  */
  if (src->vars)
    {
      if (!dest->vars)
	dest->vars = BITMAP_GGC_ALLOC ();
      bitmap_ior_into (dest->vars, src->vars);
    }
}

5923
/* Return true if the points-to solution *PT is empty.  */
5924

5925
bool
5926 5927 5928 5929 5930
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;
5931

5932 5933 5934
  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;
5935

5936 5937 5938 5939 5940
  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

5941 5942 5943 5944 5945
  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

5946
  return true;
5947 5948
}

5949
/* Return true if the points-to solution *PT includes global memory.  */
5950

5951
bool
5952
pt_solution_includes_global (struct pt_solution *pt)
5953
{
5954 5955 5956 5957
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;
5958

5959 5960
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);
5961

5962 5963 5964 5965 5966 5967 5968 5969 5970
  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ???  This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

5971 5972
  return false;
}
5973

5974 5975
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  */
5976

5977 5978 5979 5980 5981
static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;
5982

5983 5984 5985
  if (pt->nonlocal
      && is_global_var (decl))
    return true;
5986

5987
  if (pt->vars
5988
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
5989
    return true;
5990

5991 5992 5993 5994
  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;
5995

5996 5997 5998 5999 6000
  /* If the solution includes ESCAPED, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

6001
  return false;
6002
}
6003

6004 6005
/* Stats-counting wrapper around pt_solution_includes_1.  */

bool
pt_solution_includes (struct pt_solution *pt, const_tree decl)
{
  if (pt_solution_includes_1 (pt, decl))
    {
      ++pta_stats.pt_solution_includes_may_alias;
      return true;
    }
  ++pta_stats.pt_solution_includes_no_alias;
  return false;
}
6014

6015 6016
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  /* ANYTHING intersects with everything.  */
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the IPA escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}

/* Stats-counting wrapper around pt_solutions_intersect_1.  */

bool
pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt_solutions_intersect_1 (pt1, pt2))
    {
      ++pta_stats.pt_solutions_intersect_may_alias;
      return true;
    }
  ++pta_stats.pt_solutions_intersect_no_alias;
  return false;
}

6087 6088 6089 6090 6091 6092 6093 6094 6095 6096 6097 6098 6099 6100 6101 6102 6103 6104 6105 6106 6107
/* Return true if both points-to solutions PT1 and PT2 for two restrict
   qualified pointers are possibly based on the same pointer.  */

bool
pt_solutions_same_restrict_base (struct pt_solution *pt1,
				 struct pt_solution *pt2)
{
  /* Unless both solutions contain restrict tags we have to assume
     the pointers may be based on the same pointer.  */
  if (!pt1->vars_contains_restrict
      || !pt2->vars_contains_restrict)
    return true;

  /* For two pointers that are based on each other the pointed-to
     variable bitmaps will intersect.  */
  gcc_assert (pt1->vars && pt2->vars);
  return bitmap_intersect_p (pt1->vars, pt2->vars);
}

6108

6109 6110
/* Dump points-to information to OUTFILE.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  /* Solver statistics are only printed when requested.  */
  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars:          %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars:  %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
      fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  /* Dump the solution of every variable that may have pointers.  */
  for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
    {
      varinfo_t vi = get_varinfo (i);
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}


6144 6145
/* Debug points-to information to stderr.  Convenience wrapper for use
   from the debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}


6153 6154 6155 6156 6157 6158 6159
/* Initialize the always-existing constraint variables for NULL
   ANYTHING, READONLY, and INTEGER, as well as ESCAPED, NONLOCAL
   and STOREDANYTHING.  The creation order must match the fixed
   *_id constants, which the gcc_asserts below verify.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING. */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately. */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
6324

6325
/* Initialize things necessary to perform PTA: obstacks, allocation
   pools, the constraint and variable vectors, lookup maps, statistics
   and the always-existing base variables.  */

static void
init_alias_vars (void)
{
  /* Field-sensitivity is pointless with at most one field per var.  */
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints = VEC_alloc (constraint_t, heap, 8);
  varmap = VEC_alloc (varinfo_t, heap, 8);
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  /* Table used to share identical points-to bitmaps between
     solutions; see shared_bitmap_lookup/shared_bitmap_add.  */
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  init_base_vars ();
}

6351 6352 6353 6354 6355 6356 6357 6358 6359 6360 6361 6362 6363 6364 6365 6366 6367 6368 6369 6370 6371 6372 6373 6374 6375 6376 6377
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Drop the implicit ref and address nodes from the successor
     lists of the non-ref nodes.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (graph->succs[i])
      bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			  FIRST_REF_NODE * 2);

  /* The ref nodes themselves keep no successors at all.  */
  for (i = FIRST_REF_NODE; i < graph->size; i++)
    if (graph->succs[i])
      BITMAP_FREE (graph->succs[i]);

  /* Shrink the successor array to the remaining variables and throw
     away all predecessor information.  */
  graph->size = VEC_length (varinfo_t, varmap);
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  bitmap_obstack_release (&predbitmap_obstack);
}

6387 6388 6389 6390 6391 6392
/* Initialize the heapvar for statement mapping.  */

static void
init_alias_heapvars (void)
{
  if (!heapvar_for_stmt)
6393
    heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
6394 6395 6396 6397 6398 6399 6400 6401 6402 6403 6404 6405 6406
					NULL);
}

/* Delete the heapvar for statement mapping.  */

void
delete_alias_heapvars (void)
{
  if (heapvar_for_stmt != NULL)
    {
      htab_delete (heapvar_for_stmt);
      heapvar_for_stmt = NULL;
    }
}

6407
/* Solve the constraint set.  Runs the offline phases (variable
   substitution over the predecessor graph), builds the successor
   graph and iterates it to a fixed point.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Double the size: the upper half holds the implicit ref nodes.  */
  init_graph (VEC_length (varinfo_t, varmap) * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();
  free_var_substitution_info (si);

  if (dump_file && (dump_flags & TDF_GRAPH))
    dump_constraint_graph (dump_file);

  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point. */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}

/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();
  init_alias_heapvars ();

  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  /* Only register PHIs produce constraints.  */
	  if (is_gimple_reg (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  find_what_var_points_to (get_varinfo (escaped_id),
			   &cfun->gimple_df->escaped);

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  */
  FOR_EACH_VEC_ELT (varinfo_t, varmap, i, vi)
    if (vi->is_heap_var
	&& !vi->is_restrict_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;
	  if (!is_gimple_call (stmt))
	    continue;

	  /* Const functions use no memory at all.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      find_what_var_points_to (vi, pt);
	      /* Escaped (and thus nonlocal) variables are always
	         implicitly used by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  /* Const, pure and no-vops functions clobber nothing.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      find_what_var_points_to (vi, pt);
	      /* Escaped (and thus nonlocal) variables are always
	         implicitly clobbered by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}


/* Delete created points-to sets.  Frees all solver-internal state;
   the pt_solution bitmaps handed out to callers live in GC memory
   and are not touched here.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  htab_delete (shared_bitmap_table);
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  pointer_map_destroy (call_stmt_vars);
  bitmap_obstack_release (&pta_obstack);
  VEC_free (constraint_t, heap, constraints);

  /* Free the per-node complex constraint vectors before the graph.  */
  for (i = 0; i < graph->size; i++)
    VEC_free (constraint_t, heap, graph->complex[i]);
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  VEC_free (varinfo_t, heap, varmap);
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);
}
6630

6631 6632 6633 6634 6635 6636 6637 6638

/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.
   Returns zero (no further TODOs).  */

unsigned int
compute_may_aliases (void)
{
  /* An earlier IPA points-to run already provided (more precise)
     information; do not overwrite it with a local recomputation.  */
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what we have remaining it.  */
	  dump_alias_info (dump_file);

	  if (dump_flags & TDF_DETAILS)
	    dump_referenced_vars (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_alias_info (dump_file);

      if (dump_flags & TDF_DETAILS)
	dump_referenced_vars (dump_file);
    }

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}

6679 6680 6681 6682 6683
/* Gate for the dummy alias passes: run only if tree PTA is enabled.  */

static bool
gate_tree_pta (void)
{
  return flag_tree_pta != 0;
}
6684 6685 6686 6687 6688 6689 6690 6691 6692

/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass itself has no execute function;
   all the work happens through the finish TODO.  */

struct gimple_opt_pass pass_build_alias =
{
 {
  GIMPLE_PASS,
  "alias",		    /* name */
  gate_tree_pta,	    /* gate */
  NULL,                     /* execute */
  NULL,                     /* sub */
  NULL,                     /* next */
  0,                        /* static_pass_number */
  TV_NONE,                  /* tv_id */
  PROP_cfg | PROP_ssa,      /* properties_required */
  0,			    /* properties_provided */
  0,                        /* properties_destroyed */
  0,                        /* todo_flags_start */
  TODO_rebuild_alias | TODO_dump_func  /* todo_flags_finish */
 }
};

6707 6708 6709 6710 6711 6712 6713 6714 6715 6716 6717 6718 6719 6720 6721 6722 6723 6724 6725 6726 6727 6728
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias (early variant of pass_build_alias).  */

struct gimple_opt_pass pass_build_ealias =
{
 {
  GIMPLE_PASS,
  "ealias",		    /* name */
  gate_tree_pta,	    /* gate */
  NULL,                     /* execute */
  NULL,                     /* sub */
  NULL,                     /* next */
  0,                        /* static_pass_number */
  TV_NONE,                  /* tv_id */
  PROP_cfg | PROP_ssa,      /* properties_required */
  0,			    /* properties_provided */
  0,                        /* properties_destroyed */
  0,                        /* todo_flags_start */
  TODO_rebuild_alias | TODO_dump_func  /* todo_flags_finish */
 }
};

6729

6730 6731 6732 6733
/* Return true if we should execute IPA PTA.  */
static bool
gate_ipa_pta (void)
{
6734 6735
  return (optimize
	  && flag_ipa_pta
6736
	  /* Don't bother doing anything if the program has errors.  */
Joseph Myers committed
6737
	  && !seen_error ());
6738 6739
}

6740 6741 6742 6743
/* IPA PTA solutions for ESCAPED.  Initialized to the conservative
   "points to anything" state (first initializer presumably maps to
   the 'anything' flag — verify against struct pt_solution's field
   order if it changes).  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, false, NULL };

6744
/* Execute the driver for IPA PTA.  */
6745
static unsigned int
6746 6747 6748
ipa_pta_execute (void)
{
  struct cgraph_node *node;
6749 6750
  struct varpool_node *var;
  int from;
6751

6752
  in_ipa_mode = 1;
6753

6754
  init_alias_heapvars ();
6755
  init_alias_vars ();
6756

6757
  /* Build the constraints.  */
6758 6759
  for (node = cgraph_nodes; node; node = node->next)
    {
6760 6761 6762
      struct cgraph_node *alias;
      varinfo_t vi;

6763 6764 6765 6766 6767 6768 6769
      /* Nodes without a body are not interesting.  Especially do not
         visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!gimple_has_body_p (node->decl)
	  || node->clone_of)
	continue;

6770 6771 6772 6773 6774 6775
      vi = create_function_info_for (node->decl,
				     alias_get_name (node->decl));

      /* Associate the varinfo node with all aliases.  */
      for (alias = node->same_body; alias; alias = alias->next)
	insert_vi_for_tree (alias->decl, vi);
6776
    }
6777

6778 6779
  /* Create constraints for global variables and their initializers.  */
  for (var = varpool_nodes; var; var = var->next)
6780 6781 6782 6783 6784 6785 6786 6787 6788 6789
    {
      struct varpool_node *alias;
      varinfo_t vi;

      vi = get_vi_for_tree (var->decl);

      /* Associate the varinfo node with all aliases.  */
      for (alias = var->extra_name; alias; alias = alias->next)
	insert_vi_for_tree (alias->decl, vi);
    }
6790 6791 6792 6793 6794 6795 6796 6797 6798 6799

  if (dump_file)
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, 0);
      fprintf (dump_file, "\n");
    }
  from = VEC_length (constraint_t, constraints);

6800 6801
  for (node = cgraph_nodes; node; node = node->next)
    {
6802 6803 6804
      struct function *func;
      basic_block bb;
      tree old_func_decl;
6805

6806 6807 6808 6809
      /* Nodes without a body are not interesting.  */
      if (!gimple_has_body_p (node->decl)
	  || node->clone_of)
	continue;
6810

6811
      if (dump_file)
6812 6813 6814 6815 6816 6817 6818 6819
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", cgraph_node_name (node));
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n");
	}
6820

6821 6822 6823 6824
      func = DECL_STRUCT_FUNCTION (node->decl);
      old_func_decl = current_function_decl;
      push_cfun (func);
      current_function_decl = node->decl;
6825

6826 6827 6828 6829 6830
      /* For externally visible functions use local constraints for
	 their arguments.  For local functions we see all callers
	 and thus do not need initial constraints for parameters.  */
      if (node->local.externally_visible)
	intra_create_variable_infos ();
6831

6832 6833 6834 6835
      /* Build constriants for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;
6836

6837 6838 6839 6840
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);
6841

6842 6843 6844
	      if (is_gimple_reg (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }
6845

6846 6847 6848
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
6849

6850
	      find_func_aliases (stmt);
6851
	      find_func_clobbers (stmt);
6852 6853
	    }
	}
6854

6855 6856
      current_function_decl = old_func_decl;
      pop_cfun ();
6857 6858 6859 6860 6861 6862 6863 6864

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	}
      from = VEC_length (constraint_t, constraints);
6865
    }
6866

6867 6868
  /* From the constraints compute the points-to sets.  */
  solve_constraints ();
6869

6870 6871 6872 6873 6874 6875 6876 6877 6878 6879 6880 6881 6882 6883 6884 6885 6886 6887 6888 6889 6890 6891 6892 6893 6894 6895 6896 6897 6898 6899
  /* Compute the global points-to sets for ESCAPED.
     ???  Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      varinfo_t fi;
      basic_block bb;
      struct pt_solution uses, clobbers;
      struct cgraph_edge *e;

      /* Nodes without a body are not interesting.  */
      if (!gimple_has_body_p (node->decl)
	  || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
6900
      FOR_EACH_VEC_ELT (tree, fn->gimple_df->ssa_names, i, ptr)
6901 6902 6903 6904 6905 6906 6907 6908 6909 6910 6911 6912 6913 6914 6915 6916 6917 6918 6919 6920 6921 6922 6923 6924 6925 6926 6927 6928 6929 6930 6931 6932 6933 6934 6935 6936 6937 6938 6939 6940 6941 6942 6943 6944 6945 6946 6947 6948 6949 6950 6951 6952 6953 6954 6955 6956 6957 6958 6959 6960 6961 6962 6963 6964 6965 6966 6967 6968 6969 6970 6971 6972 6973 6974 6975 6976 6977 6978 6979 6980 6981 6982 6983 6984 6985 6986 6987 6988 6989 6990 6991 6992 6993 6994 6995 6996 6997 6998 6999 7000 7001 7002 7003 7004 7005 7006 7007 7008 7009 7010 7011 7012 7013 7014 7015 7016 7017 7018 7019 7020 7021 7022 7023 7024 7025 7026 7027 7028 7029 7030 7031 7032 7033 7034 7035 7036 7037 7038 7039 7040 7041 7042 7043 7044 7045 7046 7047 7048
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (ptr);
	}

      /* Compute the call-use and call-clobber sets for all direct calls.  */
      fi = lookup_vi_for_tree (node->decl);
      gcc_assert (fi->is_fn_info);
      find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
			       &clobbers);
      find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
      for (e = node->callers; e; e = e->next_caller)
	{
	  if (!e->call_stmt)
	    continue;

	  *gimple_call_clobber_set (e->call_stmt) = clobbers;
	  *gimple_call_use_set (e->call_stmt) = uses;
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      struct pt_solution *pt;
	      varinfo_t vi;
	      tree decl;

	      if (!is_gimple_call (stmt))
		continue;

	      /* Handle direct calls to external functions.  */
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && (!(fi = lookup_vi_for_tree (decl))
		      || !fi->is_fn_info))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      find_what_var_points_to (vi, pt);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ???  ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      find_what_var_points_to (vi, pt);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ???  ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}

	      /* Handle indirect calls.  */
	      if (!decl
		  && (fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ???  We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      find_what_var_points_to
				  (first_vi_for_offset (vi, fi_uses), &sol);
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      find_what_var_points_to
				  (first_vi_for_offset (vi, fi_clobbers), &sol);
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      fn->gimple_df->ipa_pta = true;
    }

7049
  delete_points_to_sets ();
7050

7051
  in_ipa_mode = 0;
7052

7053
  return 0;
7054
}
struct simple_ipa_opt_pass pass_ipa_pta =
7057
{
7058 7059
 {
  SIMPLE_IPA_PASS,
7060 7061 7062 7063 7064 7065 7066 7067 7068 7069 7070
  "pta",		                /* name */
  gate_ipa_pta,			/* gate */
  ipa_pta_execute,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PTA,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
7071 7072
  TODO_update_ssa                       /* todo_flags_finish */
 }
7073 7074
};


#include "gt-tree-ssa-structalias.h"