tree-streamer.c 12.2 KB
Newer Older
Diego Novillo committed
1 2 3
/* Miscellaneous utilities for tree streaming.  Things that are used
   in both input and output are here.

4
   Copyright (C) 2011-2019 Free Software Foundation, Inc.
Diego Novillo committed
5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
   Contributed by Diego Novillo <dnovillo@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
26
#include "backend.h"
27
#include "tree.h"
28
#include "gimple.h"
29
#include "tree-streamer.h"
30
#include "cgraph.h"
Diego Novillo committed
31

32 33 34 35 36 37 38 39
/* Table indexed by machine_mode, used for 2 different purposes.
   During streaming out we record here a non-zero value for every mode
   that was streamed out.
   During streaming in, we translate the on-disk mode using this
   table.  For normal LTO it is set to the identity mapping, for
   ACCEL_COMPILER it is filled in depending on the mode_table
   content.  */
unsigned char streamer_mode_table[1 << 8];

40 41
/* Check that all the TS_* structures handled by the streamer_write_* and
   streamer_read_* routines are exactly ALL the structures defined in
Diego Novillo committed
42 43 44
   treestruct.def.  */

void
45
streamer_check_handled_ts_structures (void)
Diego Novillo committed
46 47 48 49 50 51 52 53 54 55 56 57
{
  bool handled_p[LAST_TS_ENUM];
  unsigned i;

  memset (&handled_p, 0, sizeof (handled_p));

  /* These are the TS_* structures that are either handled or
     explicitly ignored by the streamer routines.  */
  handled_p[TS_BASE] = true;
  handled_p[TS_TYPED] = true;
  handled_p[TS_COMMON] = true;
  handled_p[TS_INT_CST] = true;
58
  handled_p[TS_POLY_INT_CST] = true;
Diego Novillo committed
59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101
  handled_p[TS_REAL_CST] = true;
  handled_p[TS_FIXED_CST] = true;
  handled_p[TS_VECTOR] = true;
  handled_p[TS_STRING] = true;
  handled_p[TS_COMPLEX] = true;
  handled_p[TS_IDENTIFIER] = true;
  handled_p[TS_DECL_MINIMAL] = true;
  handled_p[TS_DECL_COMMON] = true;
  handled_p[TS_DECL_WRTL] = true;
  handled_p[TS_DECL_NON_COMMON] = true;
  handled_p[TS_DECL_WITH_VIS] = true;
  handled_p[TS_FIELD_DECL] = true;
  handled_p[TS_VAR_DECL] = true;
  handled_p[TS_PARM_DECL] = true;
  handled_p[TS_LABEL_DECL] = true;
  handled_p[TS_RESULT_DECL] = true;
  handled_p[TS_CONST_DECL] = true;
  handled_p[TS_TYPE_DECL] = true;
  handled_p[TS_FUNCTION_DECL] = true;
  handled_p[TS_TYPE_COMMON] = true;
  handled_p[TS_TYPE_WITH_LANG_SPECIFIC] = true;
  handled_p[TS_TYPE_NON_COMMON] = true;
  handled_p[TS_LIST] = true;
  handled_p[TS_VEC] = true;
  handled_p[TS_EXP] = true;
  handled_p[TS_SSA_NAME] = true;
  handled_p[TS_BLOCK] = true;
  handled_p[TS_BINFO] = true;
  handled_p[TS_STATEMENT_LIST] = true;
  handled_p[TS_CONSTRUCTOR] = true;
  handled_p[TS_OMP_CLAUSE] = true;
  handled_p[TS_OPTIMIZATION] = true;
  handled_p[TS_TARGET_OPTION] = true;
  handled_p[TS_TRANSLATION_UNIT_DECL] = true;

  /* Anything not marked above will trigger the following assertion.
     If this assertion triggers, it means that there is a new TS_*
     structure that should be handled by the streamer.  */
  for (i = 0; i < LAST_TS_ENUM; i++)
    gcc_assert (handled_p[i]);
}


102
/* Helper for streamer_tree_cache_insert_1.  Add T to CACHE->NODES at
Diego Novillo committed
103 104 105
   slot IX.  */

static void
106
streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
107
				       unsigned ix, tree t, hashval_t hash)
Diego Novillo committed
108
{
109 110
  /* We're either replacing an old element or appending consecutively.  */
  if (cache->nodes.exists ())
111
    {
112 113 114 115
      if (cache->nodes.length () == ix)
	cache->nodes.safe_push (t);
      else
	cache->nodes[ix] = t;
116
    }
117
  if (cache->hashes.exists ())
118
    {
119 120 121
      if (cache->hashes.length () == ix)
	cache->hashes.safe_push (hash);
      else
122 123
	cache->hashes[ix] = hash;
    }
Diego Novillo committed
124 125 126
}


127 128
/* Helper for streamer_tree_cache_insert and streamer_tree_cache_insert_at.
   CACHE, T, and IX_P are as in streamer_tree_cache_insert.
Diego Novillo committed
129 130 131 132 133 134 135 136 137

   If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
   slot in the cache.  Otherwise, T is inserted at the position indicated
   in *IX_P.

   If T already existed in CACHE, return true.  Otherwise,
   return false.  */

static bool
138
streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
139
			      tree t, hashval_t hash, unsigned *ix_p,
140
			      bool insert_at_next_slot_p)
Diego Novillo committed
141 142 143 144 145
{
  bool existed_p;

  gcc_assert (t);

Trevor Saunders committed
146
  unsigned int &ix = cache->node_map->get_or_insert (t, &existed_p);
147
  if (!existed_p)
Diego Novillo committed
148 149 150
    {
      /* Determine the next slot to use in the cache.  */
      if (insert_at_next_slot_p)
151
	ix = cache->next_idx++;
Diego Novillo committed
152 153 154
      else
	ix = *ix_p;

155
      streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
Diego Novillo committed
156 157 158 159 160 161 162 163 164
    }
  else
    {
      if (!insert_at_next_slot_p && ix != *ix_p)
	{
	  /* If the caller wants to insert T at a specific slot
	     location, and ENTRY->TO does not match *IX_P, add T to
	     the requested location slot.  */
	  ix = *ix_p;
165
	  streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
Diego Novillo committed
166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182
	}
    }

  if (ix_p)
    *ix_p = ix;

  return existed_p;
}


/* Insert tree node T in CACHE.  If T already existed in the cache
   return true.  Otherwise, return false.

   HASH is the hash value associated with T; it is stored alongside T
   when the cache keeps hashes.

   If IX_P is non-null, update it with the index into the cache where
   T has been stored.  */

bool
streamer_tree_cache_insert (struct streamer_tree_cache_d *cache, tree t,
			    hashval_t hash, unsigned *ix_p)
{
  /* Insert at the next available slot (last argument true).  */
  return streamer_tree_cache_insert_1 (cache, t, hash, ix_p, true);
}


190
/* Replace the tree node with T in CACHE at slot IX.  */
Diego Novillo committed
191

192 193 194
void
streamer_tree_cache_replace_tree (struct streamer_tree_cache_d *cache,
				  tree t, unsigned ix)
Diego Novillo committed
195
{
196 197 198
  hashval_t hash = 0;
  if (cache->hashes.exists ())
    hash = streamer_tree_cache_get_hash (cache, ix);
199 200 201 202
  if (!cache->node_map)
    streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
  else
    streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
Diego Novillo committed
203 204 205 206 207 208
}


/* Appends tree node T to CACHE, even if T already existed in it.  */

void
209 210
streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
			    tree t, hashval_t hash)
Diego Novillo committed
211
{
212
  unsigned ix = cache->next_idx++;
213 214 215 216
  if (!cache->node_map)
    streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
  else
    streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
Diego Novillo committed
217 218 219 220 221 222 223
}

/* Return true if tree node T exists in CACHE, otherwise false.  If IX_P is
   not NULL, write to *IX_P the index into the cache where T is stored
   ((unsigned)-1 if T is not found).  */

bool
224 225
streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
			    unsigned *ix_p)
Diego Novillo committed
226
{
227
  unsigned *slot;
Diego Novillo committed
228 229 230 231 232
  bool retval;
  unsigned ix;

  gcc_assert (t);

Trevor Saunders committed
233
  slot = cache->node_map->get (t);
Diego Novillo committed
234 235 236 237 238 239 240 241
  if (slot == NULL)
    {
      retval = false;
      ix = -1;
    }
  else
    {
      retval = true;
242
      ix = *slot;
Diego Novillo committed
243 244 245 246 247 248 249 250 251
    }

  if (ix_p)
    *ix_p = ix;

  return retval;
}


252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277
/* Verify that NODE is in CACHE.  */

static void
verify_common_node_recorded (struct streamer_tree_cache_d *cache, tree node)
{
  /* Restrict this to flag_checking only because in general violating it is
     harmless plus we never know what happens on all targets/frontend/flag(!)
     combinations.  */
  if (!flag_checking)
    return;

  if (cache->node_map)
    {
      gcc_assert (streamer_tree_cache_lookup (cache, node, NULL));
      return;
    }

  /* No map available; fall back to a linear scan of the node vector.  */
  gcc_assert (cache->nodes.exists ());
  bool found = false;
  for (unsigned i = 0; !found && i < cache->nodes.length (); ++i)
    found = (cache->nodes[i] == node);
  gcc_assert (found);
}


278 279 280
/* Record NODE in CACHE, recursing into the component types whose
   sharing matters for LTO.  The sequence of appends performed here must
   be identical for every frontend and for lto1, since slot positions
   double as stream hashes (see below).  */

static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload simply don't.
     ???  We'd want to verify that this doesn't happen, or alternatively
     do not recurse at all.  */
  if (node == char_type_node)
    return;

  /* Boolean type and constants are frontend dependent and are skipped
     by preload_common_nodes; they must never reach this point.  */
  gcc_checking_assert (node != boolean_type_node
		       && node != boolean_true_node
		       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries we'll simply stream
     a random other tree.  A NULL tree never will be looked up so it
     doesn't matter which tree we replace it with, just to be sure
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* ???  FIXME, devise a better hash value.  But the hash needs to be equal
     for all frontend and lto1 invocations.  So just use the position
     in the cache as hash value.  */
  streamer_tree_cache_append (cache, node, cache->nodes.length ());

  switch (TREE_CODE (node))
    {
    case ERROR_MARK:
    case FIELD_DECL:
    case FIXED_POINT_TYPE:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case TREE_LIST:
    case VOID_CST:
    case VOID_TYPE:
      /* No recursive trees.  */
      break;
    case ARRAY_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Preload the element/pointed-to type as well.  */
      record_common_node (cache, TREE_TYPE (node));
      break;
    case COMPLEX_TYPE:
      /* Verify that a complex type's component type (node_type) has been
	 handled already (and we thus don't need to recurse here).  */
      verify_common_node_recorded (cache, TREE_TYPE (node));
      break;
    case RECORD_TYPE:
      /* The FIELD_DECLs of structures should be shared, so that every
	 COMPONENT_REF uses the same tree node when referencing a field.
	 Pointer equality between FIELD_DECLs is used by the alias
	 machinery to compute overlapping component references (see
	 nonoverlapping_component_refs_p and
	 nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
	record_common_node (cache, f);
      break;
    default:
      /* Unexpected tree code.  */
      gcc_unreachable ();
    }
}


/* Preload common nodes into CACHE and make sure they are merged
   properly according to the gimple type table.  The iteration order
   and the set of skipped indices below are part of the cross-frontend
   contract: every frontend and lto1 must record the exact same
   sequence of nodes.  */

static void
preload_common_nodes (struct streamer_tree_cache_d *cache)
{
  unsigned i;

  /* The standard integer type nodes.  */
  for (i = 0; i < itk_none; i++)
    /* Skip itk_char.  char_type_node is dependent on -f[un]signed-char.  */
    if (i != itk_char)
      record_common_node (cache, integer_types[i]);

  /* The sizetype variants.  */
  for (i = 0; i < stk_type_kind_last; i++)
    record_common_node (cache, sizetype_tab[i]);

  /* The global trees, minus everything that is frontend- or
     flag-dependent.  */
  for (i = 0; i < TI_MAX; i++)
    /* Skip boolean type and constants, they are frontend dependent.  */
    if (i != TI_BOOLEAN_TYPE
	&& i != TI_BOOLEAN_FALSE
	&& i != TI_BOOLEAN_TRUE
	/* MAIN_IDENTIFIER is not always initialized by Fortran FE.  */
	&& i != TI_MAIN_IDENTIFIER
	/* PID_TYPE is initialized only by C family front-ends.  */
	&& i != TI_PID_TYPE
	/* Skip optimization and target option nodes; they depend on flags.  */
	&& i != TI_OPTIMIZATION_DEFAULT
	&& i != TI_OPTIMIZATION_CURRENT
	&& i != TI_TARGET_OPTION_DEFAULT
	&& i != TI_TARGET_OPTION_CURRENT
	&& i != TI_CURRENT_TARGET_PRAGMA
	&& i != TI_CURRENT_OPTIMIZE_PRAGMA
	/* Skip va_list* related nodes if offloading.  For native LTO
	   we want them to be merged for the stdarg pass, for offloading
	   they might not be identical between host and offloading target.  */
	&& (!lto_stream_offload_p
	    || (i != TI_VA_LIST_TYPE
		&& i != TI_VA_LIST_GPR_COUNTER_FIELD
		&& i != TI_VA_LIST_FPR_COUNTER_FIELD)))
      record_common_node (cache, global_trees[i]);
}


Diego Novillo committed
391 392
/* Create a cache of pickled nodes.  */

393
struct streamer_tree_cache_d *
394
streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
Diego Novillo committed
395
{
396
  struct streamer_tree_cache_d *cache;
Diego Novillo committed
397

398
  cache = XCNEW (struct streamer_tree_cache_d);
Diego Novillo committed
399

400
  if (with_map)
Trevor Saunders committed
401
    cache->node_map = new hash_map<tree, unsigned> (251);
402 403 404
  cache->next_idx = 0;
  if (with_vec)
    cache->nodes.create (165);
405 406
  if (with_hashes)
    cache->hashes.create (165);
Diego Novillo committed
407 408 409 410

  /* Load all the well-known tree nodes that are always created by
     the compiler on startup.  This prevents writing them out
     unnecessarily.  */
411
  preload_common_nodes (cache);
Diego Novillo committed
412 413 414 415 416 417 418 419

  return cache;
}


/* Delete the streamer cache C.  */

void
420
streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
Diego Novillo committed
421 422 423 424
{
  if (c == NULL)
    return;

Trevor Saunders committed
425 426
  delete c->node_map;
  c->node_map = NULL;
427
  c->nodes.release ();
428
  c->hashes.release ();
Diego Novillo committed
429 430
  free (c);
}