lto-streamer-in.c 49.4 KB
Newer Older
1 2
/* Read the GIMPLE representation from a file stream.

3
   Copyright (C) 2009-2019 Free Software Foundation, Inc.
4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
   Re-implemented by Diego Novillo <dnovillo@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
26
#include "backend.h"
27 28
#include "target.h"
#include "rtl.h"
29 30
#include "tree.h"
#include "gimple.h"
31 32
#include "cfghooks.h"
#include "tree-pass.h"
33
#include "ssa.h"
34
#include "gimple-streamer.h"
35
#include "toplev.h"
36
#include "gimple-iterator.h"
37 38 39
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
Andrew MacLeod committed
40
#include "tree-ssa.h"
41
#include "except.h"
Andrew MacLeod committed
42
#include "cgraph.h"
43
#include "cfgloop.h"
44
#include "debug.h"
45
#include "alloc-pool.h"
46

47 48
/* Allocator used to hold string slot entries for line map streaming.  */
static struct object_allocator<struct string_slot> *string_slot_allocator;
49

50
/* The table to hold the file names.  */
51 52 53 54 55 56
static hash_table<string_slot_hasher> *file_name_hash_table;

/* This obstack holds file names used in locators. Line map datastructures
   points here and thus it needs to be kept allocated as long as linemaps
   exists.  */
static struct obstack file_name_obstack;
57 58 59 60 61


/* Check that tag ACTUAL has one of the given values.  NUM_TAGS is the
   number of valid tag values to check.  */

Diego Novillo committed
62
void
63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80
lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
{
  va_list ap;
  int i;

  va_start (ap, ntags);
  for (i = 0; i < ntags; i++)
    if ((unsigned) actual == va_arg (ap, unsigned))
      {
	va_end (ap);
	return;
      }

  va_end (ap);
  internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
}


81 82 83
/* Read LENGTH bytes from STREAM to ADDR.  */

void
84
lto_input_data_block (class lto_input_block *ib, void *addr, size_t length)
85 86
{
  size_t i;
87
  unsigned char *const buffer = (unsigned char *) addr;
88 89

  for (i = 0; i < length; i++)
90
    buffer[i] = streamer_read_uchar (ib);
91 92 93
}


94 95 96 97 98 99
/* Lookup STRING in file_name_hash_table.  If found, return the existing
   string, otherwise insert STRING as the canonical version.  */

static const char *
canon_file_name (const char *string)
{
100
  string_slot **slot;
101
  struct string_slot s_slot;
102 103
  size_t len = strlen (string);

104
  s_slot.s = string;
105
  s_slot.len = len;
106

107
  slot = file_name_hash_table->find_slot (&s_slot, INSERT);
108 109 110 111 112
  if (*slot == NULL)
    {
      char *saved_string;
      struct string_slot *new_slot;

113 114
      saved_string = XOBNEWVEC (&file_name_obstack, char, len + 1);
      new_slot = string_slot_allocator->allocate ();
115
      memcpy (saved_string, string, len + 1);
116
      new_slot->s = saved_string;
117
      new_slot->len = len;
118 119 120 121 122
      *slot = new_slot;
      return saved_string;
    }
  else
    {
123
      struct string_slot *old_slot = *slot;
124 125 126 127
      return old_slot->s;
    }
}

128
/* Pointer to currently alive instance of lto_location_cache.  */
129

130
lto_location_cache *lto_location_cache::current_cache;
131

132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154
/* Sort locations in source order.  Start with file from last application.
   qsort comparator over cached_location entries: entries for the file
   (and within it, the line) that the cache streamed most recently sort
   first, so apply_location_cache performs as few linemap file switches
   as possible.  */

int
lto_location_cache::cmp_loc (const void *pa, const void *pb)
{
  const cached_location *a = ((const cached_location *)pa);
  const cached_location *b = ((const cached_location *)pb);
  /* Snapshot the active file/line of the live cache instance.  */
  const char *current_file = current_cache->current_file;
  int current_line = current_cache->current_line;

  /* The currently active file sorts before all other files.  */
  if (a->file == current_file && b->file != current_file)
    return -1;
  if (a->file != current_file && b->file == current_file)
    return 1;
  if (a->file == current_file && b->file == current_file)
    {
      /* Within the active file, the active line sorts first.  */
      if (a->line == current_line && b->line != current_line)
	return -1;
      if (a->line != current_line && b->line == current_line)
	return 1;
    }
  /* Otherwise order lexicographically: file name, system-header flag,
     line, column.  File pointers are canonicalized, so pointer
     inequality implies the names differ.  */
  if (a->file != b->file)
    return strcmp (a->file, b->file);
  if (a->sysp != b->sysp)
    return a->sysp ? 1 : -1;
  if (a->line != b->line)
    return a->line - b->line;
  return a->col - b->col;
}

/* Apply all changes in location cache.  Add locations into linemap and patch
   trees.  Returns true if any cached location was processed.  */

bool
lto_location_cache::apply_location_cache ()
{
  /* The file last entered into the global line table; static so it
     persists across calls and tells us whether to LC_ENTER (very first
     file) or LC_RENAME.  */
  static const char *prev_file;
  if (!loc_cache.length ())
    return false;
  /* Sort so that runs sharing a file/line are adjacent and the active
     file comes first; see cmp_loc.  */
  if (loc_cache.length () > 1)
    loc_cache.qsort (cmp_loc);

  for (unsigned int i = 0; i < loc_cache.length (); i++)
    {
      struct cached_location loc = loc_cache[i];

      if (current_file != loc.file)
	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
		     loc.sysp, loc.file, loc.line);
      else if (current_line != loc.line)
	{
	  /* Compute the widest column used on this line among the
	     (sorted, hence adjacent) remaining entries so that
	     linemap_line_start reserves enough column bits.  */
	  int max = loc.col;

	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
	    if (loc.file != loc_cache[j].file
		|| loc.line != loc_cache[j].line)
	      break;
	    else if (max < loc_cache[j].col)
	      max = loc_cache[j].col;
	  linemap_line_start (line_table, loc.line, max + 1);
	}
      /* Unpatched slots hold the sentinel value BUILTINS_LOCATION + 1
	 placed there by input_location.  */
      gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
      if (current_file == loc.file && current_line == loc.line
	  && current_col == loc.col)
	*loc.loc = current_loc;
      else
        current_loc = *loc.loc = linemap_position_for_column (line_table,
							      loc.col);
      current_line = loc.line;
      prev_file = current_file = loc.file;
      current_col = loc.col;
    }
  /* Everything streamed; empty the cache and reset the accepted mark.  */
  loc_cache.truncate (0);
  accepted_length = 0;
  return true;
}

/* Tree merging did not succeed; mark all changes in the cache as accepted
   so a later revert_location_cache will keep them.  */

void
lto_location_cache::accept_location_cache ()
{
  gcc_assert (current_cache == this);
  accepted_length = loc_cache.length ();
}

/* Tree merging did succeed; throw away recent changes by truncating the
   cache back to the last accepted length.  */

void
lto_location_cache::revert_location_cache ()
{
  loc_cache.truncate (accepted_length);
}

/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */

void
lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
				    class data_in *data_in)
{
  /* Delta-decoding state: the writer streams only the fields that
     changed since the previous location, so the last seen file, line,
     column and system-header flag persist across calls.  */
  static const char *stream_file;
  static int stream_line;
  static int stream_col;
  static bool stream_sysp;
  bool file_change, line_change, column_change;

  gcc_assert (current_cache == this);

  *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);

  if (*loc < RESERVED_LOCATION_COUNT)
    return;

  /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
     ICE on it.  */

  /* Unpack in the exact order the writer packed: change flags first,
     then only the fields whose flag is set.  */
  file_change = bp_unpack_value (bp, 1);
  line_change = bp_unpack_value (bp, 1);
  column_change = bp_unpack_value (bp, 1);

  if (file_change)
    {
      stream_file = canon_file_name (bp_unpack_string (data_in, bp));
      stream_sysp = bp_unpack_value (bp, 1);
    }

  if (line_change)
    stream_line = bp_unpack_var_len_unsigned (bp);

  if (column_change)
    stream_col = bp_unpack_var_len_unsigned (bp);

  /* This optimization saves location cache operations during gimple
     streaming.  */

  if (current_file == stream_file && current_line == stream_line
      && current_col == stream_col && current_sysp == stream_sysp)
    {
      *loc = current_loc;
      return;
    }

  /* Defer the linemap lookup: record the slot to patch and the decoded
     fields; apply_location_cache will fill in *LOC later.  */
  struct cached_location entry
    = {stream_file, loc, stream_line, stream_col, stream_sysp};
  loc_cache.safe_push (entry);
}

/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache of DATA_IN.
   It is necessary to call apply_location_cache to get *LOC updated.  */

void
lto_input_location (location_t *loc, struct bitpack_d *bp,
		    class data_in *data_in)
{
  /* Thin forwarder to the per-stream location cache.  */
  data_in->location_cache.input_location (loc, bp, data_in);
}

292 293 294 295 296
/* Read location and return it instead of going through location caching.
   This should be used only when the resulting location is not going to be
   discarded, since it forces the whole pending cache to be applied.  */

location_t
stream_input_location_now (struct bitpack_d *bp, class data_in *data_in)
{
  location_t loc;
  stream_input_location (&loc, bp, data_in);
  /* Flush the cache so LOC is patched before we return it by value.  */
  data_in->location_cache.apply_location_cache ();
  return loc;
}
304

305 306 307 308
/* Read a reference to a tree node from DATA_IN using input block IB.
   TAG is the expected node that should be found in IB, if TAG belongs
   to one of the indexable trees, expect to read a reference index to
   be looked up in one of the symbol tables, otherwise read the pysical
309
   representation of the tree using stream_read_tree.  FN is the
310 311
   function scope for the read tree.  */

Diego Novillo committed
312
tree
313
lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in,
314 315 316 317 318
		    struct function *fn, enum LTO_tags tag)
{
  unsigned HOST_WIDE_INT ix_u;
  tree result = NULL_TREE;

319
  lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
320 321 322 323

  switch (tag)
    {
    case LTO_type_ref:
324
      ix_u = streamer_read_uhwi (ib);
325 326 327 328
      result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
      break;

    case LTO_ssa_name_ref:
329
      ix_u = streamer_read_uhwi (ib);
330
      result = (*SSANAMES (fn))[ix_u];
331 332 333
      break;

    case LTO_field_decl_ref:
334
      ix_u = streamer_read_uhwi (ib);
335 336 337 338
      result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
      break;

    case LTO_function_decl_ref:
339
      ix_u = streamer_read_uhwi (ib);
340 341 342 343
      result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
      break;

    case LTO_type_decl_ref:
344
      ix_u = streamer_read_uhwi (ib);
345 346 347 348
      result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
      break;

    case LTO_namespace_decl_ref:
349
      ix_u = streamer_read_uhwi (ib);
350 351 352 353 354 355 356 357
      result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
      break;

    case LTO_global_decl_ref:
    case LTO_result_decl_ref:
    case LTO_const_decl_ref:
    case LTO_imported_decl_ref:
    case LTO_label_decl_ref:
358
    case LTO_translation_unit_decl_ref:
359
    case LTO_namelist_decl_ref:
360
      ix_u = streamer_read_uhwi (ib);
361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377
      result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (result);

  return result;
}


/* Read and return a double-linked list of catch handlers from input
   block IB, using descriptors in DATA_IN.  *LAST_P is set to the tail
   of the list; the head is returned.  */

static struct eh_catch_d *
lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in,
			 eh_catch *last_p)
{
  eh_catch first;
  enum LTO_tags tag;

  *last_p = first = NULL;
  /* The list is streamed as a run of LTO_eh_catch records terminated
     by a zero tag.  */
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      tree list;
      eh_catch n;

      lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);

      /* Read the catch node.  */
      n = ggc_cleared_alloc<eh_catch_d> ();
      n->type_list = stream_read_tree (ib, data_in);
      n->filter_list = stream_read_tree (ib, data_in);
      n->label = stream_read_tree (ib, data_in);

      /* Register all the types in N->FILTER_LIST.  */
      for (list = n->filter_list; list; list = TREE_CHAIN (list))
	add_type_for_runtime (TREE_VALUE (list));

      /* Chain N to the end of the list.  */
      if (*last_p)
	(*last_p)->next_catch = n;
      n->prev_catch = *last_p;
      *last_p = n;

      /* Set the head of the list the first time through the loop.  */
      if (first == NULL)
	first = n;

      tag = streamer_read_record_start (ib);
    }

  return first;
}


/* Read and return EH region IX from input block IB, using descriptors
   in DATA_IN.  Returns NULL for a null record.  Cross-region pointers
   are read as integer indices; fixup_eh_region_pointers converts them
   to real pointers later.  */

static eh_region
input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_region r;

  /* Read the region header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  r = ggc_cleared_alloc<eh_region_d> ();
  r->index = streamer_read_hwi (ib);

  /* The streamed index must agree with the slot we are filling.  */
  gcc_assert (r->index == ix);

  /* Read all the region pointers as region numbers.  We'll fix up
     the pointers once the whole array has been read.  */
  r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);

  /* The record tag doubles as the region-type discriminator.  */
  switch (tag)
    {
      case LTO_ert_cleanup:
	r->type = ERT_CLEANUP;
	break;

      case LTO_ert_try:
	{
	  struct eh_catch_d *last_catch;
	  r->type = ERT_TRY;
	  r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
							     &last_catch);
	  r->u.eh_try.last_catch = last_catch;
	  break;
	}

      case LTO_ert_allowed_exceptions:
	{
	  tree l;

	  r->type = ERT_ALLOWED_EXCEPTIONS;
	  r->u.allowed.type_list = stream_read_tree (ib, data_in);
	  r->u.allowed.label = stream_read_tree (ib, data_in);
	  r->u.allowed.filter = streamer_read_uhwi (ib);

	  /* Make the allowed types known to the EH runtime machinery.  */
	  for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
	    add_type_for_runtime (TREE_VALUE (l));
	}
	break;

      case LTO_ert_must_not_throw:
	{
	  r->type = ERT_MUST_NOT_THROW;
	  r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
	  bitpack_d bp = streamer_read_bitpack (ib);
	  r->u.must_not_throw.failure_loc
	   = stream_input_location_now (&bp, data_in);
	}
	break;

      default:
	gcc_unreachable ();
    }

  /* Landing pads are also streamed as an index, fixed up later.  */
  r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);

  return r;
}


/* Read and return EH landing pad IX from input block IB, using descriptors
   in DATA_IN.  Returns NULL for a null record.  Pointer fields are read
   as indices and fixed up later by fixup_eh_region_pointers.  */

static eh_landing_pad
input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_landing_pad lp;

  /* Read the landing pad header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);

  lp = ggc_cleared_alloc<eh_landing_pad_d> ();
  lp->index = streamer_read_hwi (ib);
  /* The streamed index must match the slot being filled.  */
  gcc_assert (lp->index == ix);
  lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
  lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
  lp->post_landing_pad = stream_read_tree (ib, data_in);

  return lp;
}


/* After reading the EH regions, pointers to peer and children regions
   are region numbers.  This converts all these region numbers into
   real pointers into the rematerialized regions for FN.  ROOT_REGION
   is the region number for the root EH region in FN.  */

static void
fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
{
  unsigned i;
  vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
  vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
  eh_region r;
  eh_landing_pad lp;

  gcc_assert (eh_array && lp_array);

  gcc_assert (root_region >= 0);
  fn->eh->region_tree = (*eh_array)[root_region];

/* Each pointer field currently holds an array index smuggled through the
   pointer representation; replace it with the element at that index.  */
#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH region array.  */
  FOR_EACH_VEC_ELT (*eh_array, i, r)
    {
      /* The array may contain NULL regions.  */
      if (r == NULL)
	continue;

      gcc_assert (i == (unsigned) r->index);
      FIXUP_EH_REGION (r->outer);
      FIXUP_EH_REGION (r->inner);
      FIXUP_EH_REGION (r->next_peer);
      FIXUP_EH_LP (r->landing_pads);
    }

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH landing pad array.  */
  FOR_EACH_VEC_ELT (*lp_array, i, lp)
    {
      /* The array may contain NULL landing pads.  */
      if (lp == NULL)
	continue;

      gcc_assert (i == (unsigned) lp->index);
      FIXUP_EH_LP (lp->next_lp);
      FIXUP_EH_REGION (lp->region);
    }

#undef FIXUP_EH_REGION
#undef FIXUP_EH_LP
}


/* Initialize EH support.  */

Diego Novillo committed
579
void
580 581
lto_init_eh (void)
{
582 583 584 585 586
  static bool eh_initialized_p = false;

  if (eh_initialized_p)
    return;

587 588 589 590 591 592 593 594
  /* Contrary to most other FEs, we only initialize EH support when at
     least one of the files in the set contains exception regions in
     it.  Since this happens much later than the call to init_eh in
     lang_dependent_init, we have to set flag_exceptions and call
     init_eh again to initialize the EH tables.  */
  flag_exceptions = 1;
  init_eh ();

595
  eh_initialized_p = true;
596 597 598 599 600 601 602
}


/* Read the exception table for FN from IB using the data descriptors
   in DATA_IN.  The table consists of the root region index, the region
   array, the landing pad array, the runtime type table, and the action
   chain table, terminated by an LTO_null record.  */

static void
input_eh_regions (class lto_input_block *ib, class data_in *data_in,
		  struct function *fn)
{
  HOST_WIDE_INT i, root_region, len;
  enum LTO_tags tag;

  tag = streamer_read_record_start (ib);
  /* A null record means the function has no EH table at all.  */
  if (tag == LTO_null)
    return;

  lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);

  gcc_assert (fn->eh);

  root_region = streamer_read_hwi (ib);
  /* Guard against truncation when narrowing to int below.  */
  gcc_assert (root_region == (int) root_region);

  /* Read the EH region array.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->region_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_region r = input_eh_region (ib, data_in, i);
	  (*fn->eh->region_array)[i] = r;
	}
    }

  /* Read the landing pads.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->lp_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_landing_pad lp = input_eh_lp (ib, data_in, i);
	  (*fn->eh->lp_array)[i] = lp;
	}
    }

  /* Read the runtime type data.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->ttype_data, len);
      for (i = 0; i < len; i++)
	{
	  tree ttype = stream_read_tree (ib, data_in);
	  (*fn->eh->ttype_data)[i] = ttype;
	}
    }

  /* Read the table of action chains.  The representation differs for
     the ARM EABI unwinder (trees) vs. everything else (raw bytes).  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
	  for (i = 0; i < len; i++)
	    {
	      tree t = stream_read_tree (ib, data_in);
	      (*fn->eh->ehspec_data.arm_eabi)[i] = t;
	    }
	}
      else
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
	  for (i = 0; i < len; i++)
	    {
	      uchar c = streamer_read_uchar (ib);
	      (*fn->eh->ehspec_data.other)[i] = c;
	    }
	}
    }

  /* Reconstruct the EH region tree by fixing up the peer/children
     pointers.  */
  fixup_eh_region_pointers (fn, root_region);

  /* Consume and verify the closing record.  */
  tag = streamer_read_record_start (ib);
  lto_tag_check_range (tag, LTO_null, LTO_null);
}


/* Make a new basic block with index INDEX in function FN, register it
   in FN's block array and bump FN's block count.  */

static basic_block
make_new_block (struct function *fn, unsigned int index)
{
  basic_block new_bb = alloc_block ();

  new_bb->index = index;
  SET_BASIC_BLOCK_FOR_FN (fn, index, new_bb);
  n_basic_blocks_for_fn (fn)++;
  return new_bb;
}


/* Read the CFG for function FN from input block IB using descriptors in
   DATA_IN: profile status, the basic blocks with their outgoing edges,
   the linear block chain, and finally the loop tree.  */

static void
input_cfg (class lto_input_block *ib, class data_in *data_in,
	   struct function *fn)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
						   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  /* First pass: blocks and their outgoing edges, streamed as
     (index, edge-count, edges...) records terminated by index -1.  */
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      if (bb == NULL)
	bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
	{
	  unsigned int dest_index;
	  unsigned int edge_flags;
	  basic_block dest;
	  profile_probability probability;
	  edge e;

	  dest_index = streamer_read_uhwi (ib);
	  probability = profile_probability::stream_in (ib);
	  edge_flags = streamer_read_uhwi (ib);

	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

	  /* Destination block may not have been materialized yet.  */
	  if (dest == NULL)
	    dest = make_new_block (fn, dest_index);

	  e = make_edge (bb, dest, edge_flags);
	  e->probability = probability;
	}

      index = streamer_read_hwi (ib);
    }

  /* Second pass: rebuild the prev_bb/next_bb chain from the streamed
     block order, starting at the entry block.  */
  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ???  The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      int header_index = streamer_read_hwi (ib);
      /* -1 marks a deleted loop slot; keep the array indices stable.  */
      if (header_index == -1)
	{
	  loops->larray->quick_push (NULL);
	  continue;
	}

      class loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
	loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->unroll = streamer_read_hwi (ib);
      loop->owned_clique = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
         all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}


/* Read the SSA names array for function FN from DATA_IN using input
   block IB.  Names are streamed as (version, default-def flag, var)
   records terminated by version 0; freed versions leave gaps that are
   re-created as NULL_TREE entries.  */

static void
input_ssa_names (class lto_input_block *ib, class data_in *data_in,
		 struct function *fn)
{
  unsigned int i, size;

  size = streamer_read_uhwi (ib);
  init_ssanames (fn, size);

  i = streamer_read_uhwi (ib);
  while (i)
    {
      tree ssa_name, name;
      bool is_default_def;

      /* Skip over the elements that had been freed.  */
      while (SSANAMES (fn)->length () < i)
	SSANAMES (fn)->quick_push (NULL_TREE);

      is_default_def = (streamer_read_uchar (ib) != 0);
      name = stream_read_tree (ib, data_in);
      ssa_name = make_ssa_name_fn (fn, name, NULL);

      if (is_default_def)
	{
	  /* Default definitions get a nop as their defining statement.  */
	  set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
	  SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
	}

      i = streamer_read_uhwi (ib);
    }
}


/* Go through all NODE edges and fixup call_stmt pointers
   so they point to STMTS.  */

static void
879
fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
880
			 struct function *fn)
881
{
882 883 884
#define STMT_UID_NOT_IN_RANGE(uid) \
  (gimple_stmt_max_uid (fn) < uid || uid == 0)

885
  struct cgraph_edge *cedge;
Martin Liska committed
886
  struct ipa_ref *ref = NULL;
887 888
  unsigned int i;

889
  for (cedge = node->callees; cedge; cedge = cedge->next_callee)
890
    {
891
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
892 893
        fatal_error (input_location,
		     "Cgraph edge statement index out of range");
894
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
895
      if (!cedge->call_stmt)
896 897
        fatal_error (input_location,
		     "Cgraph edge statement index not found");
898
    }
899
  for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
900
    {
901
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
902 903
        fatal_error (input_location,
		     "Cgraph edge statement index out of range");
904
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
905
      if (!cedge->call_stmt)
906
        fatal_error (input_location, "Cgraph edge statement index not found");
907
    }
Martin Liska committed
908
  for (i = 0; node->iterate_reference (i, ref); i++)
909 910
    if (ref->lto_stmt_uid)
      {
911
	if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
912 913
	  fatal_error (input_location,
		       "Reference statement index out of range");
914 915
	ref->stmt = stmts[ref->lto_stmt_uid - 1];
	if (!ref->stmt)
916
	  fatal_error (input_location, "Reference statement index not found");
917
      }
918 919
}

920

921 922 923
/* Fixup call_stmt pointers in NODE and all clones.  Walks up to the
   clone-tree root first (the root owns the materialized body), then
   does a pre-order traversal of the whole clone tree.  */

static void
fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
  struct cgraph_node *node;
  struct function *fn;

  /* The root of the clone tree carries the function body.  */
  while (orig->clone_of)
    orig = orig->clone_of;
  fn = DECL_STRUCT_FUNCTION (orig->decl);

  /* Thunks have no gimple body of their own to fix up.  */
  if (!orig->thunk.thunk_p)
    fixup_call_stmt_edges_1 (orig, stmts, fn);
  if (orig->clones)
    for (node = orig->clones; node != orig;)
      {
	if (!node->thunk.thunk_p)
	  fixup_call_stmt_edges_1 (node, stmts, fn);
	/* Pre-order traversal: descend first, then visit siblings,
	   backtracking through clone_of when a subtree is done.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}

954 955 956

/* Input the base body of struct function FN from DATA_IN
   using input block IB: static chain, local decls, IL properties,
   the bitpacked flag set, source range, and instance discriminator.
   Reads must mirror the writer's order exactly.  */

static void
input_struct_function_base (struct function *fn, class data_in *data_in,
	                    class lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
	{
	  tree t = stream_read_tree (ib, data_in);
	  (*fn->local_decls)[i] = t;
	}
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN.  Unpack order must match the
     bitpack order on the writer side.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->calls_eh_return = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  fn->function_start_locus = stream_input_location_now (&bp, data_in);
  fn->function_end_locus = stream_input_location_now (&bp, data_in);

  /* Restore the instance discriminators if present.  */
  int instance_number = bp_unpack_value (&bp, 1);
  if (instance_number)
    {
      instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
      maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
    }
}
1019

1020

1021
/* Read the body of function FN_DECL from DATA_IN using input block IB.
   IB_CFG carries the function's control-flow graph, NODE its callgraph
   node.  Fills in DECL_RESULT, DECL_ARGUMENTS, DECL_INITIAL and, when a
   body is present, rebuilds the whole gimple IL (CFG, SSA names, EH
   regions and basic blocks) in SSA form.  */

static void
input_function (tree fn_decl, class data_in *data_in,
		class lto_input_block *ib, class lto_input_block *ib_cfg,
		cgraph_node *node)
{
  struct function *fn;
  enum LTO_tags tag;
  gimple **stmts;
  basic_block bb;

  /* The stream must start with an LTO_function record.  */
  tag = streamer_read_record_start (ib);
  lto_tag_check (tag, LTO_function);

  /* Read decls for parameters and args.  */
  DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
  DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);

  /* Read debug args if available.  */
  unsigned n_debugargs = streamer_read_uhwi (ib);
  if (n_debugargs)
    {
      vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
      vec_safe_grow (*debugargs, n_debugargs);
      for (unsigned i = 0; i < n_debugargs; ++i)
	(**debugargs)[i] = stream_read_tree (ib, data_in);
    }

  /* Read the tree of lexical scopes for the function.  */
  DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
  /* Leaf BLOCKs are streamed separately; read them only to populate the
     reader cache, their values are discarded here.  */
  unsigned block_leaf_count = streamer_read_uhwi (ib);
  while (block_leaf_count--)
    stream_read_tree (ib, data_in);

  /* A zero flag means no body follows; we are done.  */
  if (!streamer_read_uhwi (ib))
    return;

  push_struct_function (fn_decl);
  fn = DECL_STRUCT_FUNCTION (fn_decl);
  init_tree_ssa (fn);
  /* We input IL in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;

  gimple_register_cfg_hooks ();

  input_struct_function_base (fn, data_in, ib);
  input_cfg (ib_cfg, data_in, fn);

  /* Read all the SSA names.  */
  input_ssa_names (ib, data_in, fn);

  /* Read the exception handling regions in the function.  */
  input_eh_regions (ib, data_in, fn);

  gcc_assert (DECL_INITIAL (fn_decl));
  DECL_SAVED_TREE (fn_decl) = NULL_TREE;

  /* Read all the basic blocks.  A zero tag terminates the list.  */
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      input_bb (ib, tag, data_in, fn,
		node->count_materialization_scale);
      tag = streamer_read_record_start (ib);
    }

  /* Fix up the call statements that are mentioned in the callgraph
     edges.  First renumber all statement uids from zero, walking PHIs
     before regular statements in each block.  */
  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
  /* Build a uid -> stmt map for the fixup machinery below.  */
  stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi = gsi_start_phis (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  /* Advance before recording; PHIs are never removed here.  */
	  gsi_next (&bsi);
	  stmts[gimple_uid (stmt)] = stmt;
	}
      bsi = gsi_start_bb (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  bool remove = false;
	  /* If we're recompiling LTO objects with debug stmts but
	     we're not supposed to have debug stmts, remove them now.
	     We can't remove them earlier because this would cause uid
	     mismatches in fixups, but we can do it at this point, as
	     long as debug stmts don't require fixups.
	     Similarly remove all IFN_*SAN_* internal calls   */
	  if (!flag_wpa)
	    {
	      if (is_gimple_debug (stmt)
		  && (gimple_debug_nonbind_marker_p (stmt)
		      ? !MAY_HAVE_DEBUG_MARKER_STMTS
		      : !MAY_HAVE_DEBUG_BIND_STMTS))
		remove = true;
	      /* In case the linemap overflows locations can be dropped
		 to zero.  Thus do not keep nonsensical inline entry markers
		 we'd later ICE on.  */
	      tree block;
	      if (gimple_debug_inline_entry_p (stmt)
		  && (block = gimple_block (stmt))
		  && !inlined_function_outer_scope_p (block))
		remove = true;
	      if (is_gimple_call (stmt)
		  && gimple_call_internal_p (stmt))
		{
		  bool replace = false;
		  /* Sanitizer instrumentation calls whose sanitizer is
		     not enabled in this compilation are neutralized by
		     turning them into IFN_NOP.  */
		  switch (gimple_call_internal_fn (stmt))
		    {
		    case IFN_UBSAN_NULL:
		      if ((flag_sanitize
			  & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_BOUNDS:
		      if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_VPTR:
		      if ((flag_sanitize & SANITIZE_VPTR) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_OBJECT_SIZE:
		      if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_PTR:
		      if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
			replace = true;
		      break;
		    case IFN_ASAN_MARK:
		      if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
			replace = true;
		      break;
		    case IFN_TSAN_FUNC_EXIT:
		      if ((flag_sanitize & SANITIZE_THREAD) == 0)
			replace = true;
		      break;
		    default:
		      break;
		    }
		  if (replace)
		    {
		      gimple_call_set_internal_fn (as_a <gcall *> (stmt),
						   IFN_NOP);
		      update_stmt (stmt);
		    }
		}
	    }
	  if (remove)
	    {
	      /* Copy the iterator and advance BSI first so removal does
		 not invalidate the loop iterator.  */
	      gimple_stmt_iterator gsi = bsi;
	      gsi_next (&bsi);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      gsi_remove (&gsi, true);
	    }
	  else
	    {
	      gsi_next (&bsi);
	      stmts[gimple_uid (stmt)] = stmt;

	      /* Remember that the input function has begin stmt
		 markers, so that we know to expect them when emitting
		 debug info.  */
	      if (!cfun->debug_nonbind_markers
		  && gimple_debug_nonbind_marker_p (stmt))
		cfun->debug_nonbind_markers = true;
	    }
	}
    }

  /* Set the gimple body to the statement sequence in the entry
     basic block.  FIXME lto, this is fairly hacky.  The existence
     of a gimple body is used by the cgraph routines, but we should
     really use the presence of the CFG.  */
  {
    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
    gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
  }

  update_max_bb_count ();
  /* Re-associate callgraph edges and IPA summaries with the renumbered
     statements.  */
  fixup_call_stmt_edges (node, stmts);
  execute_all_ipa_stmt_fixups (node, stmts);

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  free (stmts);
  pop_cfun ();
}

1229 1230 1231
/* Read the initializer for variable VAR from DATA_IN using input
   block IB and store it as VAR's DECL_INITIAL.  */

static void
input_constructor (tree var, class data_in *data_in,
		   class lto_input_block *ib)
{
  DECL_INITIAL (var) = stream_read_tree (ib, data_in);
}

1238

1239
/* Read the body from DATA for function NODE and fill it in.
   FILE_DATA are the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   section type is LTO_section_function_body, FN must be the decl for
   that function.  */

static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
			      const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  class data_in *data_in;
  int cfg_offset;
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  /* Compute section-relative offsets.  Function bodies carry a CFG
     stream before the main stream; constructors have no CFG part.  */
  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
			      header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state. */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;


      /* Set up the struct function.  Remember the cache position so we
	 can post-process only the trees streamed for this body.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
			       file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
				  file_data->mode_table);
	  input_function (fn_decl, data_in, &ib_main, &ib_cfg,
			  dyn_cast <cgraph_node *>(node));
	}
      else
        input_constructor (fn_decl, data_in, &ib_main);
      /* Resolve the buffered streamed locations now that linemaps for
	 this body are complete.  */
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  */
	{
	  struct streamer_tree_cache_d *cache = data_in->reader_cache;
	  unsigned len = cache->nodes.length ();
	  unsigned i;
	  /* Walk only the cache entries added while reading this body,
	     i.e. indices [from, len).  */
	  for (i = len; i-- > from;)
	    {
	      tree t = streamer_tree_cache_get_tree (cache, i);
	      if (t == NULL_TREE)
		continue;

	      if (TYPE_P (t))
		{
		  gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
		  if (type_with_alias_set_p (t)
		      && canonical_type_used_p (t))
		    TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
		  /* Link local variants into their main variant's
		     variant chain.  */
		  if (TYPE_MAIN_VARIANT (t) != t)
		    {
		      gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
		      TYPE_NEXT_VARIANT (t)
			= TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
		      TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
		    }
		}
	    }
	}

      /* Restore decl state */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}


1335
/* Read the body of NODE using DATA.  FILE_DATA holds the global
   decls and types.  Thin wrapper around lto_read_body_or_constructor
   for function bodies.  */

void
lto_input_function_body (struct lto_file_decl_data *file_data,
			 struct cgraph_node *node, const char *data)
{
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}

/* Read the body of NODE using DATA.  FILE_DATA holds the global
   decls and types.  Thin wrapper around lto_read_body_or_constructor
   for variable initializers (these live in the same section type as
   function bodies).  */

void
lto_input_variable_constructor (struct lto_file_decl_data *file_data,
				struct varpool_node *node, const char *data)
{
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}


1356 1357 1358 1359
/* Queue of accumulated decl -> DIE mappings.  Like locations, these
   are only applied to prevailing tree nodes during tree merging.  */
vec<dref_entry> dref_queue;

1360 1361 1362 1363
/* Read the physical representation of a tree node EXPR from
   input block IB using the per-file context in DATA_IN.  EXPR must
   already be allocated and registered in the reader cache; this fills
   in its bitfields, pointer fields and LTO-specific extras.  */

static void
lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
{
  /* Read all the bitfield values in EXPR.  Note that for LTO, we
     only write language-independent bitfields, so no more unpacking is
     needed.  */
  streamer_read_tree_bitfields (ib, data_in, expr);

  /* Read all the pointer fields in EXPR.  */
  streamer_read_tree_body (ib, data_in, expr);

  /* Read any LTO-specific data not read by the tree streamer.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    DECL_INITIAL (expr) = stream_read_tree (ib, data_in);

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_register_external_die.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *str = streamer_read_string (data_in, ib);
      if (str)
	{
	  unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
	  /* Queue the mapping; it is applied later, only for
	     prevailing nodes (see dref_queue consumers).  */
	  dref_entry e = { expr, str, off };
	  dref_queue.safe_push (e);
	}
    }
}

1398 1399
/* Read the physical representation of a tree node with tag TAG from
   input block IB using the per-file context in DATA_IN.  */
1400

1401
static tree
1402
lto_read_tree (class lto_input_block *ib, class data_in *data_in,
1403
	       enum LTO_tags tag, hashval_t hash)
1404
{
1405
  /* Instantiate a new tree node.  */
1406
  tree result = streamer_alloc_tree (ib, data_in, tag);
1407 1408 1409 1410

  /* Enter RESULT in the reader cache.  This will make RESULT
     available so that circular references in the rest of the tree
     structure can be resolved in subsequent calls to stream_read_tree.  */
1411
  streamer_tree_cache_append (data_in->reader_cache, result, hash);
1412

1413
  lto_read_tree_1 (ib, data_in, result);
1414

1415
  /* end_marker = */ streamer_read_uchar (ib);
1416

1417 1418
  return result;
}
1419 1420


1421 1422
/* Populate the reader cache with trees materialized from the SCC
   following in the IB, DATA_IN stream.  Stores the number of streamed
   trees in *LEN and the SCC entry length in *ENTRY_LEN; returns the
   streamed SCC hash.  */

hashval_t
lto_input_scc (class lto_input_block *ib, class data_in *data_in,
	       unsigned *len, unsigned *entry_len)
{
  /* A blob of unnamed tree nodes, fill the cache from it and
     recurse.  */
  unsigned size = streamer_read_uhwi (ib);
  hashval_t scc_hash = streamer_read_uhwi (ib);
  unsigned scc_entry_len = 1;

  if (size == 1)
    {
      /* Singleton SCC: read it through the generic tree reader.  */
      enum LTO_tags tag = streamer_read_record_start (ib);
      lto_input_tree_1 (ib, data_in, tag, scc_hash);
    }
  else
    {
      unsigned int first = data_in->reader_cache->nodes.length ();
      tree result;

      scc_entry_len = streamer_read_uhwi (ib);

      /* Materialize size trees by reading their headers.  All nodes
	 must be appended to the cache before any body is read, so that
	 intra-SCC references can be resolved.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  enum LTO_tags tag = streamer_read_record_start (ib);
	  /* Only plain tree headers are valid inside an SCC blob;
	     references, shared constants and nested SCCs are not.  */
	  if (tag == LTO_null
	      || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
	      || tag == LTO_tree_pickle_reference
	      || tag == LTO_integer_cst
	      || tag == LTO_tree_scc)
	    gcc_unreachable ();

	  result = streamer_alloc_tree (ib, data_in, tag);
	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
	}

      /* Read the tree bitpacks and references.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
						 first + i);
	  lto_read_tree_1 (ib, data_in, result);
	  /* end_marker = */ streamer_read_uchar (ib);
	}
    }

  *len = size;
  *entry_len = scc_entry_len;
  return scc_hash;
}


/* Read a tree from input block IB using the per-file context in
   DATA_IN.  This context is used, for example, to resolve references
   to previously read nodes.  TAG is the record tag already read from
   the stream; HASH is the tree's SCC hash used when entering new
   nodes into the reader cache.  */

tree
lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
		  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
	 in IB is the index into the table where we expect to find
	 that tree.  */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
	 the reader cache.  */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
         existing tree integer constant merging code.  */
      tree type = stream_read_tree (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];

      /* Reassemble the wide-int value from its streamed elements.  */
      for (i = 0; i < len; i++)
	a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
      result = wide_int_to_tree (type, wide_int::from_array
				 (a, len, TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc)
    /* SCCs are handled by lto_input_scc before we ever get here.  */
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB.  */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}

1531
/* Read a tree from input block IB using the per-file context in
   DATA_IN, transparently consuming any SCC records that precede it.  */

tree
lto_input_tree (class lto_input_block *ib, class data_in *data_in)
{
  enum LTO_tags tag;

  /* Drain leading SCC records; each fills the reader cache as a side
     effect.  */
  for (tag = streamer_read_record_start (ib);
       tag == LTO_tree_scc;
       tag = streamer_read_record_start (ib))
    {
      unsigned len, entry_len;
      lto_input_scc (ib, data_in, &len, &entry_len);

      /* Register DECLs with the debuginfo machinery.  */
      while (!dref_queue.is_empty ())
	{
	  dref_entry e = dref_queue.pop ();
	  debug_hooks->register_external_die (e.decl, e.sym, e.off);
	}
    }
  /* TAG now starts the actual tree record.  */
  return lto_input_tree_1 (ib, data_in, tag, 0);
}

1552

1553 1554 1555
/* Input toplevel asms from FILE_DATA's asm section, registering each
   with the symbol table.  ORDER_BASE is added to every streamed order
   so orders from different files do not collide.  A missing section is
   not an error (the file simply has no toplevel asms).  */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_asm, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  tree str;

  if (! data)
    return;

  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
		      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
			      header->string_size, vNULL);

  /* A NULL string terminates the list of asm statements.  */
  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      /* Keep the global symtab order counter ahead of any order we
	 assign.  */
      if (node->order >= symtab->order)
	symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}


1592 1593 1594 1595 1596 1597
/* Input mode table.  Reads the streamed description of each machine
   mode used by FILE_DATA and builds FILE_DATA->mode_table, mapping the
   stream's mode numbers to this host's matching machine modes.  Fatal
   error if a streamed mode has no host equivalent (except unsupported
   vector modes, which degrade to BLKmode).  */

void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_mode_table, &len);
  if (! data)
    {
      internal_error ("cannot read LTO mode table from %s",
		      file_data->file_name);
      return;
    }

  /* 8-bit mode indices -> 256 table entries, zero-initialized.  */
  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
  /* Each record starts with the streamed mode number; 0 (VOIDmode)
     terminates the table.  */
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      enum mode_class mclass
	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      /* Class-specific extra fields: fixed-point bit counts or the
	 real format name.  */
      switch (mclass)
	{
	case MODE_FRACT:
	case MODE_UFRACT:
	case MODE_ACCUM:
	case MODE_UACCUM:
	  ibit = bp_unpack_value (&bp, 8);
	  fbit = bp_unpack_value (&bp, 8);
	  break;
	case MODE_FLOAT:
	case MODE_DECIMAL_FLOAT:
	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
						    &real_fmt_len);
	  break;
	default:
	  break;
	}
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
	 if not found, fallback to all modes.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
	for (machine_mode mr = pass ? VOIDmode
				    : GET_CLASS_NARROWEST_MODE (mclass);
	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
	     pass ? mr = (machine_mode) (mr + 1)
		  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
	  if (GET_MODE_CLASS (mr) != mclass
	      || maybe_ne (GET_MODE_SIZE (mr), size)
	      || maybe_ne (GET_MODE_PRECISION (mr), prec)
	      || (inner == m
		  ? GET_MODE_INNER (mr) != mr
		  : GET_MODE_INNER (mr) != table[(int) inner])
	      || GET_MODE_IBIT (mr) != ibit
	      || GET_MODE_FBIT (mr) != fbit
	      || maybe_ne (GET_MODE_NUNITS (mr), nunits))
	    continue;
	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
	    continue;
	  else
	    {
	      /* Found a host mode matching all streamed properties;
		 pass = 2 also signals success after the loops.  */
	      table[m] = mr;
	      pass = 2;
	      break;
	    }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      /* NOTE: pass == 2 here means NO match was found (both search
	 passes were exhausted without the early exit resetting it);
	 handle the failure below.  */
      if (pass == 2)
	{
	  switch (mclass)
	    {
	    case MODE_VECTOR_BOOL:
	    case MODE_VECTOR_INT:
	    case MODE_VECTOR_FLOAT:
	    case MODE_VECTOR_FRACT:
	    case MODE_VECTOR_UFRACT:
	    case MODE_VECTOR_ACCUM:
	    case MODE_VECTOR_UACCUM:
	      /* For unsupported vector modes just use BLKmode,
		 if the scalar mode is supported.  */
	      if (table[(int) inner] != VOIDmode)
		{
		  table[m] = BLKmode;
		  break;
		}
	      /* FALLTHRU */
	    default:
	      fatal_error (UNKNOWN_LOCATION, "unsupported mode %qs", mname);
	      break;
	    }
	}
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}


1712 1713 1714
/* Initialization for the LTO reader.  */

void
1715
lto_reader_init (void)
1716 1717
{
  lto_streamer_init ();
1718
  file_name_hash_table
1719 1720 1721 1722 1723 1724 1725 1726 1727 1728 1729 1730 1731 1732 1733 1734 1735
    = new hash_table<string_slot_hasher> (37);
  string_slot_allocator = new object_allocator <struct string_slot>
				("line map file name hash");
  gcc_obstack_init (&file_name_obstack);
}

/* Release the hash table and allocator used to stream in location
   file names.  The obstack holding the names themselves must survive,
   as the line map table still points into it.  */

void
lto_free_file_name_hash (void)
{
  delete string_slot_allocator;
  string_slot_allocator = NULL;

  delete file_name_hash_table;
  file_name_hash_table = NULL;

  /* file_name_obstack deliberately stays allocated; see above.  */
}


/* Create a new data_in object for FILE_DATA. STRINGS is the string
   table to use with LEN strings.  RESOLUTIONS is the vector of linker
   resolutions (NULL if not using a linker plugin).  */

1743
class data_in *
1744 1745
lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
		    unsigned len,
1746
		    vec<ld_plugin_symbol_resolution_t> resolutions)
1747
{
1748
  class data_in *data_in = new (class data_in);
1749 1750 1751 1752
  data_in->file_data = file_data;
  data_in->strings = strings;
  data_in->strings_len = len;
  data_in->globals_resolution = resolutions;
1753
  data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1754 1755 1756 1757 1758 1759 1760
  return data_in;
}


/* Remove DATA_IN.  */

void
1761
lto_data_in_delete (class data_in *data_in)
1762
{
1763
  data_in->globals_resolution.release ();
1764
  streamer_tree_cache_delete (data_in->reader_cache);
1765
  delete data_in;
1766
}