/* Data and functions related to line maps and input files.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "intl.h"
#include "diagnostic-core.h"
#include "selftest.h"
#include "cpplib.h"

#ifndef HAVE_ICONV
#define HAVE_ICONV 0
#endif

/* This is a cache used by get_next_line to store the content of a
   file whose lines are to be looked up.  */
struct fcache
{
  /* Information describing a line boundary.  */
  struct line_info
  {
    /* The line number.  It starts from 1.  */
    size_t line_num;

    /* The position (byte count) of the beginning of the line,
       relative to the file data pointer.  This starts at zero.  */
    size_t start_pos;

    /* The position (byte count) of the last byte of the line.  This
       normally points to the '\n' character, or to one byte after the
       last byte of the file, if the file doesn't contain a '\n'
       character.  */
    size_t end_pos;

    line_info (size_t l, size_t s, size_t e)
      : line_num (l), start_pos (s), end_pos (e)
    {}

    line_info ()
      :line_num (0), start_pos (0), end_pos (0)
    {}
  };

  /* The number of times this file has been accessed.  This is used
     to designate which file cache to evict from the cache
     array.  */
  unsigned use_count;

  const char *file_path;

  FILE *fp;

  /* This points to the content of the file that we've read so
     far.  */
  char *data;

  /* The size of the DATA array above.  */
  size_t size;

  /* The number of bytes read from the underlying file so far.  This
     must be less than (or equal to) SIZE above.  */
  size_t nb_read;

  /* The index of the beginning of the current line.  */
  size_t line_start_idx;

  /* The number of the previous line read.  This starts at 1.  Zero
     means we've read no line so far.  */
  size_t line_num;

  /* This is the total number of lines of the current file.  At the
     moment, we try to get this information from the line map
     subsystem.  Note that this is just a hint.  When using the C++
     front-end, this hint is correct because the input file is then
     completely tokenized before parsing starts; so the line map knows
     the number of lines before compilation really starts.  With the C
     front-end, for example, it can happen that we start emitting
     diagnostics before the line map has seen the end of the file.  */
  size_t total_lines;

  /* Could this file be missing a trailing newline on its final line?
     Initially true (to cope with empty files), set to true/false
     as each line is read.  */
  bool missing_trailing_newline;

  /* This is a record of the beginning and end of the lines we've seen
     while reading the file.  This is useful to avoid walking the data
     from the beginning when we are asked to read a line that is
     before LINE_START_IDX above.  Note that the maximum size of this
     record is fcache_line_record_size, so that the memory consumption
     doesn't explode.  We thus scale total_lines down to
     fcache_line_record_size.  */
  vec<line_info, va_heap> line_record;

  fcache ();
  ~fcache ();
};
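
/* A worked example of the line_record scaling mentioned in the struct
   above (the figures are made up): with fcache_line_record_size == 100
   and a file of total_lines == 10000, get_next_line records a line
   boundary only when (line_num * 100) / 10000 reaches the current
   length of LINE_RECORD, i.e. roughly one line in every hundred, so
   the record never grows beyond fcache_line_record_size entries.  */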

/* Current position in real source file.  */

location_t input_location = UNKNOWN_LOCATION;

struct line_maps *line_table;

/* A stashed copy of "line_table" for use by selftest::line_table_test.
   This needs to be a global so that it can be a GC root, and thus
   prevent the stashed copy from being garbage-collected if the GC runs
   during a line_table_test.  */

struct line_maps *saved_line_table;

static fcache *fcache_tab;
static const size_t fcache_tab_size = 16;
static const size_t fcache_buffer_size = 4 * 1024;
static const size_t fcache_line_record_size = 100;

/* Expand the source location LOC into a human readable location.  If
   LOC resolves to a builtin location, the file name of the readable
   location is set to the string "<built-in>". If EXPANSION_POINT_P is
   TRUE and LOC is virtual, then it is resolved to the expansion
   point of the involved macro.  Otherwise, it is resolved to the
   spelling location of the token.

   When resolving to the spelling location of the token, if the
   resulting location is for a built-in location (that is, it has no
   associated line/column) in the context of a macro expansion, the
   returned location is the first one (while unwinding the macro
   location towards its expansion point) that is in real source
   code.  */

static expanded_location
expand_location_1 (source_location loc,
		   bool expansion_point_p)
{
  expanded_location xloc;
  const line_map_ordinary *map;
  enum location_resolution_kind lrk = LRK_MACRO_EXPANSION_POINT;
  tree block = NULL;

  if (IS_ADHOC_LOC (loc))
    {
      block = LOCATION_BLOCK (loc);
      loc = LOCATION_LOCUS (loc);
    }

  memset (&xloc, 0, sizeof (xloc));

  if (loc >= RESERVED_LOCATION_COUNT)
    {
      if (!expansion_point_p)
	{
	  /* We want to resolve LOC to its spelling location.

	     But if that spelling location is a reserved location that
	     appears in the context of a macro expansion (like for a
	     location for a built-in token), let's consider the first
	     location (toward the expansion point) that is not reserved;
	     that is, the first location that is in real source code.  */
	  loc = linemap_unwind_to_first_non_reserved_loc (line_table,
							  loc, NULL);
	  lrk = LRK_SPELLING_LOCATION;
	}
      loc = linemap_resolve_location (line_table, loc,
				      lrk, &map);
      xloc = linemap_expand_location (line_table, map, loc);
    }

  xloc.data = block;
  if (loc <= BUILTINS_LOCATION)
    xloc.file = loc == UNKNOWN_LOCATION ? NULL : _("<built-in>");

  return xloc;
}
/* Initialize the set of caches used for files accessed by caret
   diagnostics.  */

static void
diagnostic_file_cache_init (void)
{
  if (fcache_tab == NULL)
    fcache_tab = new fcache[fcache_tab_size];
}

/* Free the resources used by the set of caches used for files accessed
   by caret diagnostics.  */

void
diagnostic_file_cache_fini (void)
{
  if (fcache_tab)
    {
      delete [] (fcache_tab);
      fcache_tab = NULL;
    }
}

/* Return the total number of lines that have been read so far by the
   line map (in the preprocessor).  For languages like C++ that
   entirely preprocess the input file before starting to parse, this
   equals the actual number of lines of the file.  */

static size_t
total_lines_num (const char *file_path)
{
  size_t r = 0;
  source_location l = 0;
  if (linemap_get_file_highest_location (line_table, file_path, &l))
    {
      gcc_assert (l >= RESERVED_LOCATION_COUNT);
      expanded_location xloc = expand_location (l);
      r = xloc.line;
    }
  return r;
}

/* Look up the cache used for the content of a given file accessed by
   caret diagnostics.  Return the found cached file, or NULL if no
   cached file was found.  */

static fcache*
lookup_file_in_cache_tab (const char *file_path)
{
  if (file_path == NULL)
    return NULL;

  diagnostic_file_cache_init ();

  /* This will contain the found cached file.  */
  fcache *r = NULL;
  for (unsigned i = 0; i < fcache_tab_size; ++i)
    {
      fcache *c = &fcache_tab[i];
      if (c->file_path && !strcmp (c->file_path, file_path))
	{
	  ++c->use_count;
	  r = c;
	}
    }

  if (r)
    ++r->use_count;

  return r;
}

/* Purge any mention of FILE_PATH from the cache of files used for
   printing source code.  For use in selftests when working
   with tempfiles.  */

void
diagnostics_file_cache_forcibly_evict_file (const char *file_path)
{
  gcc_assert (file_path);

  fcache *r = lookup_file_in_cache_tab (file_path);
  if (!r)
    /* Not found.  */
    return;

  r->file_path = NULL;
  if (r->fp)
    fclose (r->fp);
  r->fp = NULL;
  r->nb_read = 0;
  r->line_start_idx = 0;
  r->line_num = 0;
  r->line_record.truncate (0);
  r->use_count = 0;
  r->total_lines = 0;
  r->missing_trailing_newline = true;
}

/* Return the file cache that has been used the least (or the first
   empty one).  If HIGHEST_USE_COUNT is non-null, *HIGHEST_USE_COUNT
   is set to the highest use count of the entries in the cache
   table.  */

static fcache*
evicted_cache_tab_entry (unsigned *highest_use_count)
{
  diagnostic_file_cache_init ();

  fcache *to_evict = &fcache_tab[0];
  unsigned huc = to_evict->use_count;
  for (unsigned i = 1; i < fcache_tab_size; ++i)
    {
      fcache *c = &fcache_tab[i];
      bool c_is_empty = (c->file_path == NULL);

      if (c->use_count < to_evict->use_count
	  || (to_evict->file_path && c_is_empty))
	/* We evict C because it's either an entry with a lower use
	   count or one that is empty.  */
	to_evict = c;

      if (huc < c->use_count)
	huc = c->use_count;

      if (c_is_empty)
	/* We've reached the end of the cache; subsequent elements are
	   all empty.  */
	break;
    }

  if (highest_use_count)
    *highest_use_count = huc;

  return to_evict;
}

/* Create the cache used for the content of a given file to be
   accessed by caret diagnostics.  This cache is added to an array of
   caches and can be retrieved by lookup_file_in_cache_tab.  This
   function returns the created cache.  Note that only the last
   fcache_tab_size files are cached.  */

static fcache*
add_file_to_cache_tab (const char *file_path)
{

  FILE *fp = fopen (file_path, "r");
  if (fp == NULL)
    return NULL;

  unsigned highest_use_count = 0;
  fcache *r = evicted_cache_tab_entry (&highest_use_count);
  r->file_path = file_path;
  if (r->fp)
    fclose (r->fp);
  r->fp = fp;
  r->nb_read = 0;
  r->line_start_idx = 0;
  r->line_num = 0;
  r->line_record.truncate (0);
  /* Ensure that this cache entry doesn't get evicted next time
     add_file_to_cache_tab is called.  */
  r->use_count = ++highest_use_count;
  r->total_lines = total_lines_num (file_path);
  r->missing_trailing_newline = true;

  return r;
}

/* Look up the cache used for the content of a given file accessed by
   caret diagnostics.  If no cached file was found, create a new cache
   for this file, add it to the array of cached files and return
   it.  */

static fcache*
lookup_or_add_file_to_cache_tab (const char *file_path)
{
  fcache *r = lookup_file_in_cache_tab (file_path);
  if (r == NULL)
    r = add_file_to_cache_tab (file_path);
  return r;
}

/* Default constructor for a file cache used by caret
   diagnostics.  */

fcache::fcache ()
: use_count (0), file_path (NULL), fp (NULL), data (0),
  size (0), nb_read (0), line_start_idx (0), line_num (0),
  total_lines (0), missing_trailing_newline (true)
{
  line_record.create (0);
}

/* Destructor for a file cache used by caret diagnostics.  */

fcache::~fcache ()
{
  if (fp)
    {
      fclose (fp);
      fp = NULL;
    }
  if (data)
    {
      XDELETEVEC (data);
      data = 0;
    }
  line_record.release ();
}

/* Returns TRUE iff the cache would need to be filled with data coming
   from the file.  That is, either the cache is empty or full or the
   current line is empty.  Note that if the cache is full, it would
   need to be extended and filled again.  */

static bool
needs_read (fcache *c)
{
  return (c->nb_read == 0
	  || c->nb_read == c->size
	  || (c->line_start_idx >= c->nb_read - 1));
}

/*  Return TRUE iff the cache is full and thus needs to be
    extended.  */

static bool
needs_grow (fcache *c)
{
  return c->nb_read == c->size;
}

/* Grow the cache if it needs to be extended.  */

static void
maybe_grow (fcache *c)
{
  if (!needs_grow (c))
    return;

  size_t size = c->size == 0 ? fcache_buffer_size : c->size * 2;
  c->data = XRESIZEVEC (char, c->data, size);
  c->size = size;
}

/*  Read more data into the cache.  Extends the cache if need be.
    Returns TRUE iff new data could be read.  */

static bool
read_data (fcache *c)
{
  if (feof (c->fp) || ferror (c->fp))
    return false;

  maybe_grow (c);

  char * from = c->data + c->nb_read;
  size_t to_read = c->size - c->nb_read;
  size_t nb_read = fread (from, 1, to_read, c->fp);

  if (ferror (c->fp))
    return false;

  c->nb_read += nb_read;
  return !!nb_read;
}

/* Read new data iff the cache needs to be filled with more data
   coming from the file FP.  Return TRUE iff the cache was filled with
   more data.  */

static bool
maybe_read_data (fcache *c)
{
  if (!needs_read (c))
    return false;
  return read_data (c);
}

/* Read a new line from file FP, using C as a cache for the data
   coming from the file.  Upon successful completion, *LINE is set to
   the beginning of the line found.  *LINE points directly into the
   line cache and is only valid until the next call of get_next_line.
   *LINE_LEN is set to the length of the line.  Note that the line
   does not contain any terminal delimiter.  This function returns
   true if some data was read or processed from the cache, false
   otherwise.  Note that subsequent calls to get_next_line might
   make the content of *LINE invalid.  */

static bool
get_next_line (fcache *c, char **line, ssize_t *line_len)
{
  /* Fill the cache with data to process.  */
  maybe_read_data (c);

  size_t remaining_size = c->nb_read - c->line_start_idx;
  if (remaining_size == 0)
    /* There is no more data to process.  */
    return false;

  char *line_start = c->data + c->line_start_idx;

  char *next_line_start = NULL;
  size_t len = 0;
  char *line_end = (char *) memchr (line_start, '\n', remaining_size);
  if (line_end == NULL)
    {
      /* We haven't found the end-of-line delimiter in the cache.
	 Fill the cache with more data from the file and look for the
	 '\n'.  */
      while (maybe_read_data (c))
	{
	  line_start = c->data + c->line_start_idx;
	  remaining_size = c->nb_read - c->line_start_idx;
	  line_end = (char *) memchr (line_start, '\n', remaining_size);
	  if (line_end != NULL)
	    {
	      next_line_start = line_end + 1;
	      break;
	    }
	}
      if (line_end == NULL)
	{
	  /* We've loaded all of the file into the cache and still no
	     '\n'.  Let's say the line ends at one byte past the
	     end of the file.  This is to stay consistent with the case
	     when the line ends with a '\n' and line_end points to that
	     terminal '\n'.  That consistency is useful below in
	     the len calculation.  */
	  line_end = c->data + c->nb_read;
	  c->missing_trailing_newline = true;
	}
      else
	c->missing_trailing_newline = false;
    }
  else
    {
      next_line_start = line_end + 1;
      c->missing_trailing_newline = false;
    }

  if (ferror (c->fp))
    return false;

  /* At this point, we've found the end of the line.  It either
     points to the '\n' or to one byte after the last byte of the
     file.  */
  gcc_assert (line_end != NULL);
  len = line_end - line_start;

  if (c->line_start_idx < c->nb_read)
    *line = line_start;

  ++c->line_num;

  /* Before we update our line record, make sure the hint about the
     total number of lines of the file is correct.  If it's not, then
     we give up recording line boundaries from now on.  */
  bool update_line_record = true;
  if (c->line_num > c->total_lines)
    update_line_record = false;

  /* Now update our line record so that re-reading lines from
     before c->line_start_idx is faster.  */
  if (update_line_record
      && c->line_record.length () < fcache_line_record_size)
    {
      /* If the file's lines fit in the line record, we just record all
	 of its lines ...  */
      if (c->total_lines <= fcache_line_record_size
	  && c->line_num > c->line_record.length ())
	c->line_record.safe_push (fcache::line_info (c->line_num,
						 c->line_start_idx,
						 line_end - c->data));
      else if (c->total_lines > fcache_line_record_size)
	{
	  /* ... otherwise, we just scale total_lines down to
	     fcache_line_record_size lines.  */
	  size_t n = (c->line_num * fcache_line_record_size) / c->total_lines;
	  if (c->line_record.length () == 0
	      || n >= c->line_record.length ())
	    c->line_record.safe_push (fcache::line_info (c->line_num,
						     c->line_start_idx,
						     line_end - c->data));
	}
    }

  /* Update c->line_start_idx so that it points to the next line to be
     read.  */
  if (next_line_start)
    c->line_start_idx = next_line_start - c->data;
  else
    /* We didn't find any terminal '\n'.  Let's consider that the end
       of line is the end of the data in the cache.  The next
       invocation of get_next_line will either read more data from the
       underlying file or return false early because we've reached the
       end of the file.  */
    c->line_start_idx = c->nb_read;

  *line_len = len;

  return true;
}

/* Consume the next bytes coming from the cache (or from its
   underlying file if there are remaining unread bytes in the file)
   until we reach the next end-of-line (or end-of-file).  There is no
   copying from the cache involved.  Return TRUE upon successful
   completion.  */

static bool
goto_next_line (fcache *cache)
{
  char *l;
  ssize_t len;

  return get_next_line (cache, &l, &len);
}

/* Read an arbitrary line number LINE_NUM from the file cached in C.
   If the line was read successfully, *LINE points to the beginning
   of the line in the file cache and *LINE_LEN is the length of the
   line.  *LINE is not nul-terminated, but may contain zero bytes.
   *LINE is only valid until the next call of read_line_num.
   This function returns true if a line was read.  */

static bool
read_line_num (fcache *c, size_t line_num,
	       char **line, ssize_t *line_len)
{
  gcc_assert (line_num > 0);

  if (line_num <= c->line_num)
    {
      /* We've been asked to read lines that are before c->line_num.
	 So let's use our line record (if it's not empty) to try to
	 avoid re-reading the file from the beginning again.  */

      if (c->line_record.is_empty ())
	{
	  c->line_start_idx = 0;
	  c->line_num = 0;
	}
      else
	{
	  fcache::line_info *i = NULL;
	  if (c->total_lines <= fcache_line_record_size)
	    {
	      /* In languages where the input file is not totally
		 preprocessed up front, the c->total_lines hint
		 can be smaller than the number of lines of the
		 file.  In that case, only the first
		 c->total_lines have been recorded.

		 Otherwise, the first c->total_lines we've read have
		 their start/end recorded here.  */
	      i = (line_num <= c->total_lines)
		? &c->line_record[line_num - 1]
		: &c->line_record[c->total_lines - 1];
	      gcc_assert (i->line_num <= line_num);
	    }
	  else
	    {
	      /*  So the file had more lines than our line record
		  size.  Thus the number of lines we've recorded has
		  been scaled down to fcache_line_record_size.  Let's
		  pick the start/end of the recorded line that is
		  closest to line_num.  */
	      size_t n = (line_num <= c->total_lines)
		? line_num * fcache_line_record_size / c->total_lines
		: c->line_record.length () - 1;
	      if (n < c->line_record.length ())
		{
		  i = &c->line_record[n];
		  gcc_assert (i->line_num <= line_num);
		}
	    }

	  if (i && i->line_num == line_num)
	    {
	      /* We have the start/end of the line.  */
	      *line = c->data + i->start_pos;
	      *line_len = i->end_pos - i->start_pos;
	      return true;
	    }

	  if (i)
	    {
	      c->line_start_idx = i->start_pos;
	      c->line_num = i->line_num - 1;
	    }
	  else
	    {
	      c->line_start_idx = 0;
	      c->line_num = 0;
	    }
	}
    }

  /*  Let's walk from line c->line_num up to line_num - 1, without
      copying any line.  */
  while (c->line_num < line_num - 1)
    if (!goto_next_line (c))
      return false;

  /* The line we want is the next one.  Let's read and copy it back to
     the caller.  */
  return get_next_line (c, line, line_len);
}

/* Return the physical source line that corresponds to FILE_PATH/LINE.
   The line is not nul-terminated.  The returned pointer is only
   valid until the next call of location_get_source_line.
   Note that the line can contain several null characters,
   so LINE_LEN, if non-null, points to the actual length of the line.
   If the function fails, NULL is returned.  */

const char *
location_get_source_line (const char *file_path, int line,
			  int *line_len)
{
  char *buffer = NULL;
  ssize_t len;

  if (line == 0)
    return NULL;

  fcache *c = lookup_or_add_file_to_cache_tab (file_path);
  if (c == NULL)
    return NULL;

  bool read = read_line_num (c, line, &buffer, &len);

  if (read && line_len)
    *line_len = len;

  return read ? buffer : NULL;
}
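
/* A minimal usage sketch for the function above (the file name and
   line number are hypothetical):

     int len;
     const char *text = location_get_source_line ("foo.c", 42, &len);
     if (text)
       fprintf (stderr, "%.*s\n", len, text);

   Note the "%.*s" format: the returned line is not nul-terminated.  */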

/* Determine if FILE_PATH is missing a trailing newline on its final line.
   Only valid to call once all of the file has been loaded, by
   requesting a line number beyond the end of the file.  */

bool
location_missing_trailing_newline (const char *file_path)
{
  fcache *c = lookup_or_add_file_to_cache_tab (file_path);
  if (c == NULL)
    return false;

  return c->missing_trailing_newline;
}

/* Test if the location originates from the spelling location of a
   built-in token.  That is, return TRUE if LOC is a (possibly
   virtual) location of a built-in token that appears in the expansion
   list of a macro.  Please note that this function also works on
   tokens that result from built-in tokens.  For instance, the
   function would return true if passed a token "4" that is the result
   of the expansion of the built-in __LINE__ macro.  */
bool
is_location_from_builtin_token (source_location loc)
{
  const line_map_ordinary *map = NULL;
  loc = linemap_resolve_location (line_table, loc,
				  LRK_SPELLING_LOCATION, &map);
  return loc == BUILTINS_LOCATION;
}

/* Expand the source location LOC into a human readable location.  If
   LOC is virtual, it resolves to the expansion point of the involved
   macro.  If LOC resolves to a builtin location, the file name of the
   readable location is set to the string "<built-in>".  */

expanded_location
expand_location (source_location loc)
{
  return expand_location_1 (loc, /*expansion_point_p=*/true);
}
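
/* For instance (a usage sketch), a caller that wants the file, line
   and column of the current location can write:

     expanded_location xloc = expand_location (input_location);

   and then consult xloc.file, xloc.line and xloc.column.  */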

/* Expand the source location LOC into a human readable location.  If
   LOC is virtual, it resolves to the expansion location of the
   relevant macro.  If LOC resolves to a builtin location, the file
   name of the readable location is set to the string
   "<built-in>".  */

expanded_location
expand_location_to_spelling_point (source_location loc)
{
  return expand_location_1 (loc, /*expansion_point_p=*/false);
}

/* The rich_location class within libcpp requires a way to expand
   source_location instances, and relies on the client code
   providing a symbol named
     linemap_client_expand_location_to_spelling_point
   to do this.

   This is the implementation for libcommon.a (all host binaries),
   which simply calls into expand_location_to_spelling_point.  */

expanded_location
linemap_client_expand_location_to_spelling_point (source_location loc)
{
  return expand_location_to_spelling_point (loc);
}


/* If LOCATION is in a system header and if it is a virtual location for
   a token coming from the expansion of a macro, unwind it to the
   location of the expansion point of the macro.  Otherwise, just return
   LOCATION.

   This is used for instance when we want to emit diagnostics about a
   token that may be located in a macro that is itself defined in a
   system header, for example, for the NULL macro.  In such a case, if
   LOCATION were passed directly to diagnostic functions such as
   warning_at, the diagnostic would be suppressed (unless
   -Wsystem-headers is used).  */

source_location
expansion_point_location_if_in_system_header (source_location location)
{
  if (in_system_header_at (location))
    location = linemap_resolve_location (line_table, location,
					 LRK_MACRO_EXPANSION_POINT,
					 NULL);
  return location;
}
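
/* A usage sketch for the function above (TOKEN_LOC and the diagnostic
   call are hypothetical):

     location_t loc
       = expansion_point_location_if_in_system_header (token_loc);
     warning_at (loc, OPT_Wconversion, "possible loss of precision");

   so that a warning about a token spelled inside a system header (via
   a macro such as NULL) is not silently suppressed.  */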
/* If LOCATION is a virtual location for a token coming from the expansion
   of a macro, unwind to the location of the expansion point of the macro.  */

source_location
expansion_point_location (source_location location)
{
  return linemap_resolve_location (line_table, location,
				   LRK_MACRO_EXPANSION_POINT, NULL);
}

/* Construct a location with caret at CARET, ranging from START to
   FINISH, e.g.

                 11111111112
        12345678901234567890
     522
     523   return foo + bar;
                  ~~~~^~~~~
     524

   The location's caret is at the "+", line 523 column 15, but starts
   earlier, at the "f" of "foo" at column 11.  The finish is at the "r"
   of "bar" at column 19.  */

location_t
make_location (location_t caret, location_t start, location_t finish)
{
  location_t pure_loc = get_pure_location (caret);
  source_range src_range;
  src_range.m_start = get_start (start);
  src_range.m_finish = get_finish (finish);
  location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
						   pure_loc,
						   src_range,
						   NULL);
  return combined_loc;
}
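
/* For example, a caller combining three token locations might write
   (a sketch; op_loc, lhs_loc and rhs_loc are hypothetical locations
   for the tokens of "foo + bar"):

     location_t loc = make_location (op_loc, lhs_loc, rhs_loc);

   which yields a location whose caret is at the "+" and whose range
   spans the whole expression.  */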

#define ONE_K 1024
#define ONE_M (ONE_K * ONE_K)

/* Display a number as an integer multiple of either:
   - 1024, if said integer is >= 10 K (in base 2)
   - 1024 * 1024, if said integer is >= 10 M (in base 2).  */
#define SCALE(x) ((unsigned long) ((x) < 10 * ONE_K \
		  ? (x) \
		  : ((x) < 10 * ONE_M \
		     ? (x) / ONE_K \
		     : (x) / ONE_M)))

/* For a given integer, display either:
   - the character 'k', if the number is higher than 10 K (in base 2)
     but strictly lower than 10 M (in base 2)
   - the character 'M' if the number is higher than 10 M (in base 2)
   - the character ' ' if the number is strictly lower than 10 K.  */
#define STAT_LABEL(x) ((x) < 10 * ONE_K ? ' ' : ((x) < 10 * ONE_M ? 'k' : 'M'))

/* Display an integer amount as a multiple of 1K or 1M (in base 2).
   Display the correct unit (either k, M, or ' ') after the amount, as
   well.  */
#define FORMAT_AMOUNT(size) SCALE (size), STAT_LABEL (size)
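
/* A small usage sketch for the macros above (the byte count is made
   up): 12582912 bytes is 12 * ONE_M, which is above the 10 M
   threshold, so

     fprintf (stderr, "Macro maps size: %5lu%c\n", FORMAT_AMOUNT (12582912));

   prints the amount as "   12M".  */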

/* Dump statistics to stderr about the memory usage of the line_table
   set of line maps.  This also displays some statistics about macro
   expansion.  */

void
dump_line_table_statistics (void)
{
  struct linemap_stats s;
  long total_used_map_size,
    macro_maps_size,
    total_allocated_map_size;

  memset (&s, 0, sizeof (s));

  linemap_get_statistics (line_table, &s);

  macro_maps_size = s.macro_maps_used_size
    + s.macro_maps_locations_size;

  total_allocated_map_size = s.ordinary_maps_allocated_size
    + s.macro_maps_allocated_size
    + s.macro_maps_locations_size;

  total_used_map_size = s.ordinary_maps_used_size
    + s.macro_maps_used_size
    + s.macro_maps_locations_size;

  fprintf (stderr, "Number of expanded macros:                     %5ld\n",
           s.num_expanded_macros);
  if (s.num_expanded_macros != 0)
    fprintf (stderr, "Average number of tokens per macro expansion:  %5ld\n",
             s.num_macro_tokens / s.num_expanded_macros);
  fprintf (stderr,
           "\nLine Table allocations during the "
           "compilation process\n");
  fprintf (stderr, "Number of ordinary maps used:        %5ld%c\n",
           SCALE (s.num_ordinary_maps_used),
           STAT_LABEL (s.num_ordinary_maps_used));
  fprintf (stderr, "Ordinary map used size:              %5ld%c\n",
           SCALE (s.ordinary_maps_used_size),
           STAT_LABEL (s.ordinary_maps_used_size));
  fprintf (stderr, "Number of ordinary maps allocated:   %5ld%c\n",
           SCALE (s.num_ordinary_maps_allocated),
           STAT_LABEL (s.num_ordinary_maps_allocated));
  fprintf (stderr, "Ordinary maps allocated size:        %5ld%c\n",
           SCALE (s.ordinary_maps_allocated_size),
           STAT_LABEL (s.ordinary_maps_allocated_size));
  fprintf (stderr, "Number of macro maps used:           %5ld%c\n",
           SCALE (s.num_macro_maps_used),
           STAT_LABEL (s.num_macro_maps_used));
  fprintf (stderr, "Macro maps used size:                %5ld%c\n",
           SCALE (s.macro_maps_used_size),
           STAT_LABEL (s.macro_maps_used_size));
  fprintf (stderr, "Macro maps locations size:           %5ld%c\n",
           SCALE (s.macro_maps_locations_size),
           STAT_LABEL (s.macro_maps_locations_size));
  fprintf (stderr, "Macro maps size:                     %5ld%c\n",
           SCALE (macro_maps_size),
           STAT_LABEL (macro_maps_size));
  fprintf (stderr, "Duplicated maps locations size:      %5ld%c\n",
           SCALE (s.duplicated_macro_maps_locations_size),
           STAT_LABEL (s.duplicated_macro_maps_locations_size));
  fprintf (stderr, "Total allocated maps size:           %5ld%c\n",
           SCALE (total_allocated_map_size),
           STAT_LABEL (total_allocated_map_size));
  fprintf (stderr, "Total used maps size:                %5ld%c\n",
           SCALE (total_used_map_size),
           STAT_LABEL (total_used_map_size));
  fprintf (stderr, "Ad-hoc table size:                   %5ld%c\n",
	   SCALE (s.adhoc_table_size),
	   STAT_LABEL (s.adhoc_table_size));
  fprintf (stderr, "Ad-hoc table entries used:           %5ld\n",
	   s.adhoc_table_entries_used);
  fprintf (stderr, "optimized_ranges: %i\n",
	   line_table->num_optimized_ranges);
  fprintf (stderr, "unoptimized_ranges: %i\n",
	   line_table->num_unoptimized_ranges);

  fprintf (stderr, "\n");
}

/* Get location one beyond the final location in ordinary map IDX.  */

static source_location
get_end_location (struct line_maps *set, unsigned int idx)
{
  if (idx == LINEMAPS_ORDINARY_USED (set) - 1)
    return set->highest_location;

  struct line_map *next_map = LINEMAPS_ORDINARY_MAP_AT (set, idx + 1);
  return MAP_START_LOCATION (next_map);
}

/* Helper function for write_digit_row.  */

static void
write_digit (FILE *stream, int digit)
{
  fputc ('0' + (digit % 10), stream);
}

/* Helper function for dump_location_info.
   Write a row of numbers to STREAM, numbering a source line,
   giving the units, tens, hundreds etc of the column number.  */

static void
write_digit_row (FILE *stream, int indent,
		 const line_map_ordinary *map,
		 source_location loc, int max_col, int divisor)
{
  fprintf (stream, "%*c", indent, ' ');
  fprintf (stream, "|");
  for (int column = 1; column < max_col; column++)
    {
      source_location column_loc = loc + (column << map->m_range_bits);
      write_digit (stream, column_loc / divisor);
    }
  fprintf (stream, "\n");
}

/* Write a half-closed (START) / half-open (END) interval of
   source_location to STREAM.  */

static void
dump_location_range (FILE *stream,
		     source_location start, source_location end)
{
  fprintf (stream,
	   "  source_location interval: %u <= loc < %u\n",
	   start, end);
}

/* Write a labelled description of a half-closed (START) / half-open (END)
   interval of source_location to STREAM.  */

static void
dump_labelled_location_range (FILE *stream,
			      const char *name,
			      source_location start, source_location end)
{
  fprintf (stream, "%s\n", name);
  dump_location_range (stream, start, end);
  fprintf (stream, "\n");
}

/* Write a visualization of the locations in the line_table to STREAM.  */

void
dump_location_info (FILE *stream)
{
  /* Visualize the reserved locations.  */
  dump_labelled_location_range (stream, "RESERVED LOCATIONS",
				0, RESERVED_LOCATION_COUNT);

  /* Visualize the ordinary line_map instances, rendering the sources. */
  for (unsigned int idx = 0; idx < LINEMAPS_ORDINARY_USED (line_table); idx++)
    {
      source_location end_location = get_end_location (line_table, idx);
      /* half-closed: doesn't include this one. */

      const line_map_ordinary *map
	= LINEMAPS_ORDINARY_MAP_AT (line_table, idx);
      fprintf (stream, "ORDINARY MAP: %i\n", idx);
      dump_location_range (stream,
			   MAP_START_LOCATION (map), end_location);
      fprintf (stream, "  file: %s\n", ORDINARY_MAP_FILE_NAME (map));
      fprintf (stream, "  starting at line: %i\n",
	       ORDINARY_MAP_STARTING_LINE_NUMBER (map));
      fprintf (stream, "  column and range bits: %i\n",
	       map->m_column_and_range_bits);
      fprintf (stream, "  column bits: %i\n",
	       map->m_column_and_range_bits - map->m_range_bits);
      fprintf (stream, "  range bits: %i\n",
	       map->m_range_bits);

      /* Render the span of source lines that this "map" covers.  */
      for (source_location loc = MAP_START_LOCATION (map);
	   loc < end_location;
	   loc += (1 << map->m_range_bits) )
	{
	  gcc_assert (pure_location_p (line_table, loc) );

	  expanded_location exploc
	    = linemap_expand_location (line_table, map, loc);

	  if (0 == exploc.column)
	    {
	      /* Beginning of a new source line: draw the line.  */

	      int line_size;
	      const char *line_text = location_get_source_line (exploc.file,
								exploc.line,
								&line_size);
	      if (!line_text)
		break;
	      fprintf (stream,
		       "%s:%3i|loc:%5i|%.*s\n",
		       exploc.file, exploc.line,
		       loc,
		       line_size, line_text);

	      /* "loc" is at column 0, which means "the whole line".
		 Render the locations *within* the line, by underlining
		 it, showing the source_location numeric values
		 at each column.  */
	      int max_col = (1 << map->m_column_and_range_bits) - 1;
	      if (max_col > line_size)
		max_col = line_size + 1;

	      int indent = 14 + strlen (exploc.file);

	      /* Thousands.  */
	      if (end_location > 999)
		write_digit_row (stream, indent, map, loc, max_col, 1000);

	      /* Hundreds.  */
	      if (end_location > 99)
		write_digit_row (stream, indent, map, loc, max_col, 100);

	      /* Tens.  */
	      write_digit_row (stream, indent, map, loc, max_col, 10);

	      /* Units.  */
	      write_digit_row (stream, indent, map, loc, max_col, 1);
	    }
	}
      fprintf (stream, "\n");
    }

  /* Visualize unallocated values.  */
  dump_labelled_location_range (stream, "UNALLOCATED LOCATIONS",
				line_table->highest_location,
				LINEMAPS_MACRO_LOWEST_LOCATION (line_table));

  /* Visualize the macro line_map instances, rendering the sources. */
  for (unsigned int i = 0; i < LINEMAPS_MACRO_USED (line_table); i++)
    {
      /* Each macro map that is allocated owns source_location values
	 that are *lower* than the one before them.
	 Hence it's meaningful to view them either in order of ascending
	 source locations, or in order of ascending macro map index.  */
      const bool ascending_source_locations = true;
      unsigned int idx = (ascending_source_locations
			  ? (LINEMAPS_MACRO_USED (line_table) - (i + 1))
			  : i);
      const line_map_macro *map = LINEMAPS_MACRO_MAP_AT (line_table, idx);
      fprintf (stream, "MACRO %i: %s (%u tokens)\n",
	       idx,
	       linemap_map_get_macro_name (map),
	       MACRO_MAP_NUM_MACRO_TOKENS (map));
      dump_location_range (stream,
			   map->start_location,
			   (map->start_location
			    + MACRO_MAP_NUM_MACRO_TOKENS (map)));
      inform (MACRO_MAP_EXPANSION_POINT_LOCATION (map),
	      "expansion point is location %i",
	      MACRO_MAP_EXPANSION_POINT_LOCATION (map));
      fprintf (stream, "  map->start_location: %u\n",
	       map->start_location);

      fprintf (stream, "  macro_locations:\n");
      for (unsigned int i = 0; i < MACRO_MAP_NUM_MACRO_TOKENS (map); i++)
	{
	  source_location x = MACRO_MAP_LOCATIONS (map)[2 * i];
	  source_location y = MACRO_MAP_LOCATIONS (map)[(2 * i) + 1];

	  /* linemap_add_macro_token encodes token numbers in an expansion
	     by putting them after MAP_START_LOCATION. */

	  /* I'm typically seeing 4 uninitialized entries at the end of
	     0xafafafaf.
	     This appears to be due to macro.c:replace_args
	     adding 2 extra args for padding tokens; presumably there may
	     be a leading and/or trailing padding token injected,
	     each for 2 more location slots.
	     This would explain there being up to 4 source_locations slots
	     that may be uninitialized.  */

	  fprintf (stream, "    %u: %u, %u\n",
		   i,
		   x,
		   y);
	  if (x == y)
	    {
	      if (x < MAP_START_LOCATION (map))
		inform (x, "token %u has x-location == y-location == %u", i, x);
	      else
		fprintf (stream,
			 "x-location == y-location == %u encodes token # %u\n",
			 x, x - MAP_START_LOCATION (map));
	    }
	  else
	    {
	      inform (x, "token %u has x-location == %u", i, x);
	      inform (x, "token %u has y-location == %u", i, y);
	    }
	}
      fprintf (stream, "\n");
    }

  /* It appears that MAX_SOURCE_LOCATION itself is never assigned to a
     macro map, presumably due to an off-by-one error somewhere
     between the logic in linemap_enter_macro and
     LINEMAPS_MACRO_LOWEST_LOCATION.  */
  dump_labelled_location_range (stream, "MAX_SOURCE_LOCATION",
				MAX_SOURCE_LOCATION,
				MAX_SOURCE_LOCATION + 1);

  /* Visualize ad-hoc values.  */
  dump_labelled_location_range (stream, "AD-HOC LOCATIONS",
				MAX_SOURCE_LOCATION + 1, UINT_MAX);
}
/* string_concat's constructor.  */

string_concat::string_concat (int num, location_t *locs)
  : m_num (num)
{
  m_locs = ggc_vec_alloc <location_t> (num);
  for (int i = 0; i < num; i++)
    m_locs[i] = locs[i];
}

/* string_concat_db's constructor.  */

string_concat_db::string_concat_db ()
{
  m_table = hash_map <location_hash, string_concat *>::create_ggc (64);
}

/* Record that a string concatenation occurred, covering NUM
   string literal tokens.  LOCS is an array of size NUM, containing the
   locations of the tokens.  A copy of LOCS is taken.  */

void
string_concat_db::record_string_concatenation (int num, location_t *locs)
{
  gcc_assert (num > 1);
  gcc_assert (locs);

  location_t key_loc = get_key_loc (locs[0]);

  string_concat *concat
    = new (ggc_alloc <string_concat> ()) string_concat (num, locs);
  m_table->put (key_loc, concat);
}

/* Determine if LOC was the location of the initial token of a
   concatenation of string literal tokens.
   If so, *OUT_NUM is written to with the number of tokens, and
   *OUT_LOCS with the location of an array of locations of the
   tokens, and return true.  *OUT_LOCS is a borrowed pointer to
   storage owned by the string_concat_db.
   Otherwise, return false.  */

bool
string_concat_db::get_string_concatenation (location_t loc,
					    int *out_num,
					    location_t **out_locs)
{
  gcc_assert (out_num);
  gcc_assert (out_locs);

  location_t key_loc = get_key_loc (loc);

  string_concat **concat = m_table->get (key_loc);
  if (!concat)
    return false;

  *out_num = (*concat)->m_num;
  *out_locs = (*concat)->m_locs;
  return true;
}
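
/* A usage sketch for the two methods above (loc_foo and loc_bar are
   hypothetical token locations): after lexing the adjacent literals
   "foo" "bar", a frontend could record

     location_t locs[2] = { loc_foo, loc_bar };
     concats->record_string_concatenation (2, locs);

   and a later diagnostic at loc_foo could then recover both pieces
   with concats->get_string_concatenation (loc_foo, &num, &locs_out).  */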

/* Internal function.  Canonicalize LOC into a form suitable for
   use as a key within the database, stripping away macro expansion,
   ad-hoc information, and range information, using the location of
   the start of LOC within an ordinary linemap.  */

location_t
string_concat_db::get_key_loc (location_t loc)
{
  loc = linemap_resolve_location (line_table, loc, LRK_SPELLING_LOCATION,
				  NULL);

  loc = get_range_from_loc (line_table, loc).m_start;

  return loc;
}

/* Helper class for use within get_substring_ranges_for_loc.
   A vec of cpp_string with responsibility for releasing all of the
   str->text for each str in the vector.  */

class auto_cpp_string_vec :  public auto_vec <cpp_string>
{
 public:
  auto_cpp_string_vec (int alloc)
    : auto_vec <cpp_string> (alloc) {}

  ~auto_cpp_string_vec ()
  {
    /* Clean up the copies within this vec.  */
    int i;
    cpp_string *str;
    FOR_EACH_VEC_ELT (*this, i, str)
      free (const_cast <unsigned char *> (str->text));
  }
};

/* Attempt to populate RANGES with source location information on the
   individual characters within the string literal found at STRLOC.
   If CONCATS is non-NULL, then any string literals that the token at
   STRLOC  was concatenated with are also added to RANGES.

   Return NULL if successful, or an error message if any errors occurred (in
   which case RANGES may be only partially populated and should not
   be used).

   This is implemented by re-parsing the relevant source line(s).  */

static const char *
get_substring_ranges_for_loc (cpp_reader *pfile,
			      string_concat_db *concats,
			      location_t strloc,
			      enum cpp_ttype type,
			      cpp_substring_ranges &ranges)
{
  gcc_assert (pfile);

  if (strloc == UNKNOWN_LOCATION)
    return "unknown location";

  /* If string concatenation has occurred at STRLOC, get the locations
     of all of the literal tokens making up the compound string.
     Otherwise, just use STRLOC.  */
  int num_locs = 1;
  location_t *strlocs = &strloc;
  if (concats)
    concats->get_string_concatenation (strloc, &num_locs, &strlocs);

  auto_cpp_string_vec strs (num_locs);
  auto_vec <cpp_string_location_reader> loc_readers (num_locs);
  for (int i = 0; i < num_locs; i++)
    {
      /* Get range of strloc.  We will use it to locate the start and finish
	 of the literal token within the line.  */
      source_range src_range = get_range_from_loc (line_table, strlocs[i]);

      if (src_range.m_start >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
	/* If the string is within a macro expansion, we can't get at the
	   end location.  */
	return "macro expansion";

      if (src_range.m_start >= LINE_MAP_MAX_LOCATION_WITH_COLS)
	/* If so, we can't reliably determine where the token started within
	   its line.  */
	return "range starts after LINE_MAP_MAX_LOCATION_WITH_COLS";

      if (src_range.m_finish >= LINE_MAP_MAX_LOCATION_WITH_COLS)
	/* If so, we can't reliably determine where the token finished within
	   its line.  */
	return "range ends after LINE_MAP_MAX_LOCATION_WITH_COLS";

      expanded_location start
	= expand_location_to_spelling_point (src_range.m_start);
      expanded_location finish
	= expand_location_to_spelling_point (src_range.m_finish);
      if (start.file != finish.file)
	return "range endpoints are in different files";
      if (start.line != finish.line)
	return "range endpoints are on different lines";
      if (start.column > finish.column)
	return "range endpoints are reversed";

      int line_width;
      const char *line = location_get_source_line (start.file, start.line,
						   &line_width);
      if (line == NULL)
	return "unable to read source line";

      /* Determine the location of the literal (including quotes
	 and leading prefix chars, such as the 'u' in a u""
	 token).  */
      const char *literal = line + start.column - 1;
      int literal_length = finish.column - start.column + 1;

      gcc_assert (line_width >= (start.column - 1 + literal_length));
      cpp_string from;
      from.len = literal_length;
      /* Make a copy of the literal, to avoid having to rely on
	 the lifetime of the copy of the line within the cache.
	 This will be released by the auto_cpp_string_vec dtor.  */
      from.text = XDUPVEC (unsigned char, literal, literal_length);
      strs.safe_push (from);

      /* For very long lines, a new linemap could have started
	 halfway through the token.
	 Ensure that the loc_reader uses the linemap of the
	 *end* of the token for its start location.  */
      const line_map_ordinary *final_ord_map;
      linemap_resolve_location (line_table, src_range.m_finish,
				LRK_MACRO_EXPANSION_POINT, &final_ord_map);
      location_t start_loc
	= linemap_position_for_line_and_column (line_table, final_ord_map,
						start.line, start.column);

      cpp_string_location_reader loc_reader (start_loc, line_table);
      loc_readers.safe_push (loc_reader);
    }

  /* Rerun cpp_interpret_string, or rather, a modified version of it.  */
  const char *err = cpp_interpret_string_ranges (pfile, strs.address (),
						 loc_readers.address (),
						 num_locs, &ranges, type);
  if (err)
    return err;

  /* Success: "ranges" should now contain information on the string.  */
  return NULL;
}

/* Attempt to populate *OUT_LOC with source location information on the
   given characters within the string literal found at STRLOC.
   CARET_IDX, START_IDX, and END_IDX refer to offsets within the execution
   character set.

   For example, given CARET_IDX = 4, START_IDX = 3, END_IDX  = 7
   and string literal "012345\n789"
   *OUT_LOC is written to with:
     "012345\n789"
         ~^~~~~

   If CONCATS is non-NULL, then any string literals that the token at
   STRLOC was concatenated with are also considered.

   This is implemented by re-parsing the relevant source line(s).

   Return NULL if successful, or an error message if any errors occurred.
   Error messages are intended for GCC developers (to help debugging) rather
   than for end-users.  */

const char *
get_source_location_for_substring (cpp_reader *pfile,
				   string_concat_db *concats,
				   location_t strloc,
				   enum cpp_ttype type,
				   int caret_idx, int start_idx, int end_idx,
				   source_location *out_loc)
{
  gcc_checking_assert (caret_idx >= 0);
  gcc_checking_assert (start_idx >= 0);
  gcc_checking_assert (end_idx >= 0);
  gcc_assert (out_loc);

  cpp_substring_ranges ranges;
  const char *err
    = get_substring_ranges_for_loc (pfile, concats, strloc, type, ranges);
  if (err)
    return err;

  if (caret_idx >= ranges.get_num_ranges ())
    return "caret_idx out of range";
  if (start_idx >= ranges.get_num_ranges ())
    return "start_idx out of range";
  if (end_idx >= ranges.get_num_ranges ())
    return "end_idx out of range";

  *out_loc = make_location (ranges.get_range (caret_idx).m_start,
			    ranges.get_range (start_idx).m_start,
			    ranges.get_range (end_idx).m_finish);
  return NULL;
}

#if CHECKING_P

namespace selftest {

/* Selftests of location handling.  */

/* Attempt to populate *OUT_RANGE with source location information on the
   given character within the string literal found at STRLOC.
   CHAR_IDX refers to an offset within the execution character set.
   If CONCATS is non-NULL, then any string literals that the token at
   STRLOC was concatenated with are also considered.

   This is implemented by re-parsing the relevant source line(s).

   Return NULL if successful, or an error message if any errors occurred.
   Error messages are intended for GCC developers (to help debugging) rather
   than for end-users.  */

static const char *
get_source_range_for_char (cpp_reader *pfile,
			   string_concat_db *concats,
			   location_t strloc,
			   enum cpp_ttype type,
			   int char_idx,
			   source_range *out_range)
{
  gcc_checking_assert (char_idx >= 0);
  gcc_assert (out_range);

  cpp_substring_ranges ranges;
  const char *err
    = get_substring_ranges_for_loc (pfile, concats, strloc, type, ranges);
  if (err)
    return err;

  if (char_idx >= ranges.get_num_ranges ())
    return "char_idx out of range";

  *out_range = ranges.get_range (char_idx);
  return NULL;
}

/* As get_source_range_for_char, but write to *OUT the number
   of ranges that are available.  */

static const char *
get_num_source_ranges_for_substring (cpp_reader *pfile,
				     string_concat_db *concats,
				     location_t strloc,
				     enum cpp_ttype type,
				     int *out)
{
  gcc_assert (out);

  cpp_substring_ranges ranges;
  const char *err
    = get_substring_ranges_for_loc (pfile, concats, strloc, type, ranges);

  if (err)
    return err;

  *out = ranges.get_num_ranges ();
  return NULL;
}

/* Selftests of location handling.  */

/* Helper function for verifying location data: when location_t
   values are > LINE_MAP_MAX_LOCATION_WITH_COLS, they are treated
   as having column 0.  */

static bool
should_have_column_data_p (location_t loc)
{
  if (IS_ADHOC_LOC (loc))
    loc = get_location_from_adhoc_loc (line_table, loc);
  if (loc > LINE_MAP_MAX_LOCATION_WITH_COLS)
    return false;
  return true;
}

/* Selftest for should_have_column_data_p.  */

static void
test_should_have_column_data_p ()
{
  ASSERT_TRUE (should_have_column_data_p (RESERVED_LOCATION_COUNT));
  ASSERT_TRUE
    (should_have_column_data_p (LINE_MAP_MAX_LOCATION_WITH_COLS));
  ASSERT_FALSE
    (should_have_column_data_p (LINE_MAP_MAX_LOCATION_WITH_COLS + 1));
}

/* Verify the result of LOCATION_FILE/LOCATION_LINE/LOCATION_COLUMN
   on LOC.  */

static void
assert_loceq (const char *exp_filename, int exp_linenum, int exp_colnum,
	      location_t loc)
{
  ASSERT_STREQ (exp_filename, LOCATION_FILE (loc));
  ASSERT_EQ (exp_linenum, LOCATION_LINE (loc));
  /* If location_t values are sufficiently high, then column numbers
     will be unavailable and LOCATION_COLUMN (loc) will be 0.
     When close to the threshold, column numbers *may* be present: if
     the final linemap before the threshold contains a line that straddles
     the threshold, locations in that line have column information.  */
  if (should_have_column_data_p (loc))
    ASSERT_EQ (exp_colnum, LOCATION_COLUMN (loc));
}

/* Various selftests involve constructing a line table and one or more
   line maps within it.

   For maximum test coverage we want to run these tests with a variety
   of situations:
   - line_table->default_range_bits: some frontends use a non-zero value
   and others use zero
   - the fallback modes within line-map.c: there are various threshold
   values for source_location/location_t beyond which line-map.c changes
   behavior (disabling of the range-packing optimization, disabling
   of column-tracking).  We can exercise these by starting the line_table
   at interesting values at or near these thresholds.

   The following struct describes a particular case within our test
   matrix.  */

struct line_table_case
{
  line_table_case (int default_range_bits, int base_location)
  : m_default_range_bits (default_range_bits),
    m_base_location (base_location)
  {}

  int m_default_range_bits;
  int m_base_location;
};
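
/* An illustrative sketch (hypothetical values, not part of the machinery
   itself): a case using 5 range bits and a line table starting just below
   the column-tracking threshold could be written as

     line_table_case c (5, LINE_MAP_MAX_LOCATION_WITH_COLS - 1);
     line_table_test ltt (c);

   so that locations created within such a test straddle that threshold.  */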

/* Constructor.  Store the old value of line_table, and create a new
   one, using sane defaults.  */

line_table_test::line_table_test ()
{
  gcc_assert (saved_line_table == NULL);
  saved_line_table = line_table;
  line_table = ggc_alloc<line_maps> ();
  linemap_init (line_table, BUILTINS_LOCATION);
  gcc_assert (saved_line_table->reallocator);
  line_table->reallocator = saved_line_table->reallocator;
  gcc_assert (saved_line_table->round_alloc_size);
  line_table->round_alloc_size = saved_line_table->round_alloc_size;
  line_table->default_range_bits = 0;
}

/* Constructor.  Store the old value of line_table, and create a new
   one, using the situation described in CASE_.  */

line_table_test::line_table_test (const line_table_case &case_)
{
  gcc_assert (saved_line_table == NULL);
  saved_line_table = line_table;
  line_table = ggc_alloc<line_maps> ();
  linemap_init (line_table, BUILTINS_LOCATION);
  gcc_assert (saved_line_table->reallocator);
  line_table->reallocator = saved_line_table->reallocator;
  gcc_assert (saved_line_table->round_alloc_size);
  line_table->round_alloc_size = saved_line_table->round_alloc_size;
  line_table->default_range_bits = case_.m_default_range_bits;
  if (case_.m_base_location)
    {
      line_table->highest_location = case_.m_base_location;
      line_table->highest_line = case_.m_base_location;
    }
}

/* Destructor.  Restore the old value of line_table.  */

line_table_test::~line_table_test ()
{
  gcc_assert (saved_line_table != NULL);
  line_table = saved_line_table;
  saved_line_table = NULL;
}

/* Verify basic operation of ordinary linemaps.  */

static void
test_accessing_ordinary_linemaps (const line_table_case &case_)
{
  line_table_test ltt (case_);

  /* Build a simple linemap describing some locations. */
  linemap_add (line_table, LC_ENTER, false, "foo.c", 0);

  linemap_line_start (line_table, 1, 100);
  location_t loc_a = linemap_position_for_column (line_table, 1);
  location_t loc_b = linemap_position_for_column (line_table, 23);

  linemap_line_start (line_table, 2, 100);
  location_t loc_c = linemap_position_for_column (line_table, 1);
  location_t loc_d = linemap_position_for_column (line_table, 17);

  /* Example of a very long line.  */
  linemap_line_start (line_table, 3, 2000);
  location_t loc_e = linemap_position_for_column (line_table, 700);

  linemap_add (line_table, LC_LEAVE, false, NULL, 0);

  /* Multiple files.  */
  linemap_add (line_table, LC_ENTER, false, "bar.c", 0);
  linemap_line_start (line_table, 1, 200);
  location_t loc_f = linemap_position_for_column (line_table, 150);
  linemap_add (line_table, LC_LEAVE, false, NULL, 0);

  /* Verify that we can recover the location info.  */
  assert_loceq ("foo.c", 1, 1, loc_a);
  assert_loceq ("foo.c", 1, 23, loc_b);
  assert_loceq ("foo.c", 2, 1, loc_c);
  assert_loceq ("foo.c", 2, 17, loc_d);
  assert_loceq ("foo.c", 3, 700, loc_e);
  assert_loceq ("bar.c", 1, 150, loc_f);

  ASSERT_FALSE (is_location_from_builtin_token (loc_a));
  ASSERT_TRUE (pure_location_p (line_table, loc_a));

  /* Verify using make_location to build a range, and extracting data
     back from it.  */
  location_t range_c_b_d = make_location (loc_c, loc_b, loc_d);
  ASSERT_FALSE (pure_location_p (line_table, range_c_b_d));
  ASSERT_EQ (loc_c, get_location_from_adhoc_loc (line_table, range_c_b_d));
  source_range src_range = get_range_from_loc (line_table, range_c_b_d);
  ASSERT_EQ (loc_b, src_range.m_start);
  ASSERT_EQ (loc_d, src_range.m_finish);
}

/* Verify various properties of UNKNOWN_LOCATION.  */

static void
test_unknown_location ()
{
  ASSERT_EQ (NULL, LOCATION_FILE (UNKNOWN_LOCATION));
  ASSERT_EQ (0, LOCATION_LINE (UNKNOWN_LOCATION));
  ASSERT_EQ (0, LOCATION_COLUMN (UNKNOWN_LOCATION));
}

/* Verify various properties of BUILTINS_LOCATION.  */

static void
test_builtins ()
{
  assert_loceq (_("<built-in>"), 0, 0, BUILTINS_LOCATION);
  ASSERT_PRED1 (is_location_from_builtin_token, BUILTINS_LOCATION);
}

/* Regression test for make_location.
   Ensure that we use pure locations for the start/finish of the range,
   rather than storing a packed or ad-hoc range as the start/finish.  */

static void
test_make_location_nonpure_range_endpoints (const line_table_case &case_)
{
  /* Issue seen with testsuite/c-c++-common/Wlogical-not-parentheses-2.c
     with C++ frontend.
     ....................0000000001111111111222.
     ....................1234567890123456789012.  */
  const char *content = "     r += !aaa == bbb;\n";
  temp_source_file tmp (SELFTEST_LOCATION, ".C", content);
  line_table_test ltt (case_);
  linemap_add (line_table, LC_ENTER, false, tmp.get_filename (), 1);

  const location_t c11 = linemap_position_for_column (line_table, 11);
  const location_t c12 = linemap_position_for_column (line_table, 12);
  const location_t c13 = linemap_position_for_column (line_table, 13);
  const location_t c14 = linemap_position_for_column (line_table, 14);
  const location_t c21 = linemap_position_for_column (line_table, 21);

  if (c21 > LINE_MAP_MAX_LOCATION_WITH_COLS)
    return;

  /* Use column 13 for the caret location, arbitrarily, to verify that we
     handle start != caret.  */
  const location_t aaa = make_location (c13, c12, c14);
  ASSERT_EQ (c13, get_pure_location (aaa));
  ASSERT_EQ (c12, get_start (aaa));
  ASSERT_FALSE (IS_ADHOC_LOC (get_start (aaa)));
  ASSERT_EQ (c14, get_finish (aaa));
  ASSERT_FALSE (IS_ADHOC_LOC (get_finish (aaa)));

  /* Make a location using a location with a range as the start-point.  */
  const location_t not_aaa = make_location (c11, aaa, c14);
  ASSERT_EQ (c11, get_pure_location (not_aaa));
  /* It should use the start location of the range, not store the range
     itself.  */
  ASSERT_EQ (c12, get_start (not_aaa));
  ASSERT_FALSE (IS_ADHOC_LOC (get_start (not_aaa)));
  ASSERT_EQ (c14, get_finish (not_aaa));
  ASSERT_FALSE (IS_ADHOC_LOC (get_finish (not_aaa)));

  /* Similarly, make a location with a range as the end-point.  */
  const location_t aaa_eq_bbb = make_location (c12, c12, c21);
  ASSERT_EQ (c12, get_pure_location (aaa_eq_bbb));
  ASSERT_EQ (c12, get_start (aaa_eq_bbb));
  ASSERT_FALSE (IS_ADHOC_LOC (get_start (aaa_eq_bbb)));
  ASSERT_EQ (c21, get_finish (aaa_eq_bbb));
  ASSERT_FALSE (IS_ADHOC_LOC (get_finish (aaa_eq_bbb)));
  const location_t not_aaa_eq_bbb = make_location (c11, c12, aaa_eq_bbb);
  /* It should use the finish location of the range, not store the range
     itself.  */
  ASSERT_EQ (c11, get_pure_location (not_aaa_eq_bbb));
  ASSERT_EQ (c12, get_start (not_aaa_eq_bbb));
  ASSERT_FALSE (IS_ADHOC_LOC (get_start (not_aaa_eq_bbb)));
  ASSERT_EQ (c21, get_finish (not_aaa_eq_bbb));
  ASSERT_FALSE (IS_ADHOC_LOC (get_finish (not_aaa_eq_bbb)));
}

/* Verify reading of input files (e.g. for caret-based diagnostics).  */

static void
test_reading_source_line ()
{
  /* Create a tempfile and write some text to it.  */
  temp_source_file tmp (SELFTEST_LOCATION, ".txt",
			"01234567890123456789\n"
			"This is the test text\n"
			"This is the 3rd line");

  /* Read back a specific line from the tempfile.  */
  int line_size;
  const char *source_line = location_get_source_line (tmp.get_filename (),
						      3, &line_size);
  ASSERT_TRUE (source_line != NULL);
  ASSERT_EQ (20, line_size);
  ASSERT_TRUE (!strncmp ("This is the 3rd line",
			 source_line, line_size));

  source_line = location_get_source_line (tmp.get_filename (),
					  2, &line_size);
  ASSERT_TRUE (source_line != NULL);
  ASSERT_EQ (21, line_size);
  ASSERT_TRUE (!strncmp ("This is the test text",
			 source_line, line_size));

  source_line = location_get_source_line (tmp.get_filename (),
					  4, &line_size);
  ASSERT_TRUE (source_line == NULL);
}

/* Tests of lexing.  */

/* Verify that token TOK from PARSER has cpp_token_as_text
   equal to EXPECTED_TEXT.  */

#define ASSERT_TOKEN_AS_TEXT_EQ(PARSER, TOK, EXPECTED_TEXT)		\
  SELFTEST_BEGIN_STMT							\
    unsigned char *actual_txt = cpp_token_as_text ((PARSER), (TOK));	\
    ASSERT_STREQ ((EXPECTED_TEXT), (const char *)actual_txt);		\
  SELFTEST_END_STMT

/* Verify that TOK's src_loc is within EXP_FILENAME at EXP_LINENUM,
   and ranges from EXP_START_COL to EXP_FINISH_COL.
   Use LOC as the effective location of the selftest.  */

static void
assert_token_loc_eq (const location &loc,
		     const cpp_token *tok,
		     const char *exp_filename, int exp_linenum,
		     int exp_start_col, int exp_finish_col)
{
  location_t tok_loc = tok->src_loc;
  ASSERT_STREQ_AT (loc, exp_filename, LOCATION_FILE (tok_loc));
  ASSERT_EQ_AT (loc, exp_linenum, LOCATION_LINE (tok_loc));

  /* If location_t values are sufficiently high, then column numbers
     will be unavailable.  */
  if (!should_have_column_data_p (tok_loc))
    return;

  ASSERT_EQ_AT (loc, exp_start_col, LOCATION_COLUMN (tok_loc));
  source_range tok_range = get_range_from_loc (line_table, tok_loc);
  ASSERT_EQ_AT (loc, exp_start_col, LOCATION_COLUMN (tok_range.m_start));
  ASSERT_EQ_AT (loc, exp_finish_col, LOCATION_COLUMN (tok_range.m_finish));
}

/* Use assert_token_loc_eq to verify the TOK->src_loc, using
   SELFTEST_LOCATION as the effective location of the selftest.  */

#define ASSERT_TOKEN_LOC_EQ(TOK, EXP_FILENAME, EXP_LINENUM, \
			    EXP_START_COL, EXP_FINISH_COL) \
  assert_token_loc_eq (SELFTEST_LOCATION, (TOK), (EXP_FILENAME), \
		       (EXP_LINENUM), (EXP_START_COL), (EXP_FINISH_COL))

/* Test of lexing a file using libcpp, verifying tokens and their
   location information.  */

static void
test_lexer (const line_table_case &case_)
{
  /* Create a tempfile and write some text to it.  */
  const char *content =
    /*00000000011111111112222222222333333.3333444444444.455555555556
      12345678901234567890123456789012345.6789012345678.901234567890.  */
    ("test_name /* c-style comment */\n"
     "                                  \"test literal\"\n"
     " // test c++-style comment\n"
     "   42\n");
  temp_source_file tmp (SELFTEST_LOCATION, ".txt", content);

  line_table_test ltt (case_);

  cpp_reader *parser = cpp_create_reader (CLK_GNUC89, NULL, line_table);

  const char *fname = cpp_read_main_file (parser, tmp.get_filename ());
  ASSERT_NE (fname, NULL);

  /* Verify that we get the expected tokens back, with the correct
     location information.  */

  location_t loc;
  const cpp_token *tok;
  tok = cpp_get_token_with_location (parser, &loc);
  ASSERT_NE (tok, NULL);
  ASSERT_EQ (tok->type, CPP_NAME);
  ASSERT_TOKEN_AS_TEXT_EQ (parser, tok, "test_name");
  ASSERT_TOKEN_LOC_EQ (tok, tmp.get_filename (), 1, 1, 9);

  tok = cpp_get_token_with_location (parser, &loc);
  ASSERT_NE (tok, NULL);
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (parser, tok, "\"test literal\"");
  ASSERT_TOKEN_LOC_EQ (tok, tmp.get_filename (), 2, 35, 48);

  tok = cpp_get_token_with_location (parser, &loc);
  ASSERT_NE (tok, NULL);
  ASSERT_EQ (tok->type, CPP_NUMBER);
  ASSERT_TOKEN_AS_TEXT_EQ (parser, tok, "42");
  ASSERT_TOKEN_LOC_EQ (tok, tmp.get_filename (), 4, 4, 5);

  tok = cpp_get_token_with_location (parser, &loc);
  ASSERT_NE (tok, NULL);
  ASSERT_EQ (tok->type, CPP_EOF);

  cpp_finish (parser, NULL);
  cpp_destroy (parser);
}

/* Forward decls.  */

struct lexer_test;
class lexer_test_options;

/* A class for specifying options of a lexer_test.
   The "apply" vfunc is called during the lexer_test constructor.  */

class lexer_test_options
{
 public:
  virtual void apply (lexer_test &) = 0;
};

/* A struct for writing lexer tests.  */

struct lexer_test
{
  lexer_test (const line_table_case &case_, const char *content,
	      lexer_test_options *options);
  ~lexer_test ();

  const cpp_token *get_token ();

  temp_source_file m_tempfile;
  line_table_test m_ltt;
  cpp_reader *m_parser;
  string_concat_db m_concats;
};
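
/* A minimal usage sketch, mirroring the tests further below (the content
   string here is purely hypothetical):

     lexer_test test (case_, "  \"text\"\n", NULL);
     const cpp_token *tok = test.get_token ();
     ASSERT_EQ (tok->type, CPP_STRING);

   The destructor then verifies that the sole remaining token is CPP_EOF.  */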

/* Use an EBCDIC encoding for the execution charset, specifically
   IBM1047-encoded (aka "EBCDIC 1047", or "Code page 1047").

   This exercises iconv integration within libcpp.
   Not every build of iconv supports the given charset,
   so we need to flag this error and handle it gracefully.  */

class ebcdic_execution_charset : public lexer_test_options
{
 public:
  ebcdic_execution_charset () : m_num_iconv_errors (0)
    {
      gcc_assert (s_singleton == NULL);
      s_singleton = this;
    }
  ~ebcdic_execution_charset ()
    {
      gcc_assert (s_singleton == this);
      s_singleton = NULL;
    }

  void apply (lexer_test &test) FINAL OVERRIDE
  {
    cpp_options *cpp_opts = cpp_get_options (test.m_parser);
    cpp_opts->narrow_charset = "IBM1047";

    cpp_callbacks *callbacks = cpp_get_callbacks (test.m_parser);
    callbacks->error = on_error;
  }

  static bool on_error (cpp_reader *pfile ATTRIBUTE_UNUSED,
			int level ATTRIBUTE_UNUSED,
			int reason ATTRIBUTE_UNUSED,
			rich_location *richloc ATTRIBUTE_UNUSED,
			const char *msgid, va_list *ap ATTRIBUTE_UNUSED)
    ATTRIBUTE_FPTR_PRINTF(5,0)
  {
    gcc_assert (s_singleton);
    /* Detect and record errors emitted by libcpp/charset.c:init_iconv_desc
       when the local iconv build doesn't support the conversion.  */
    if (strstr (msgid, "not supported by iconv"))
      {
	s_singleton->m_num_iconv_errors++;
	return true;
      }

    /* Otherwise, we have an unexpected error.  */
    abort ();
  }

  bool iconv_errors_occurred_p () const { return m_num_iconv_errors > 0; }

 private:
  static ebcdic_execution_charset *s_singleton;
  int m_num_iconv_errors;
};

ebcdic_execution_charset *ebcdic_execution_charset::s_singleton;

/* Constructor.  Override line_table with a new instance based on CASE_,
   and write CONTENT to a tempfile.  Create a cpp_reader, and use it to
   start parsing the tempfile.  */

lexer_test::lexer_test (const line_table_case &case_, const char *content,
			lexer_test_options *options) :
  /* Create a tempfile and write the text to it.  */
  m_tempfile (SELFTEST_LOCATION, ".c", content),
  m_ltt (case_),
  m_parser (cpp_create_reader (CLK_GNUC99, NULL, line_table)),
  m_concats ()
{
  if (options)
    options->apply (*this);

  cpp_init_iconv (m_parser);

  /* Parse the file.  */
  const char *fname = cpp_read_main_file (m_parser,
					  m_tempfile.get_filename ());
  ASSERT_NE (fname, NULL);
}

/* Destructor.  Verify that the next token in m_parser is EOF.  */

lexer_test::~lexer_test ()
{
  location_t loc;
  const cpp_token *tok;

  tok = cpp_get_token_with_location (m_parser, &loc);
  ASSERT_NE (tok, NULL);
  ASSERT_EQ (tok->type, CPP_EOF);

  cpp_finish (m_parser, NULL);
  cpp_destroy (m_parser);
}

/* Get the next token from m_parser.  */

const cpp_token *
lexer_test::get_token ()
{
  location_t loc;
  const cpp_token *tok;

  tok = cpp_get_token_with_location (m_parser, &loc);
  ASSERT_NE (tok, NULL);
  return tok;
}

/* Verify that locations within string literals are correctly handled.  */

/* Verify get_source_range_for_char for token(s) at STRLOC,
   using the string concatenation database for TEST.

   Assert that the character at index IDX is on EXPECTED_LINE,
   and that it begins at column EXPECTED_START_COL and ends at
   EXPECTED_FINISH_COL (unless the locations are beyond
   LINE_MAP_MAX_LOCATION_WITH_COLS, in which case don't check their
   columns).  */

static void
assert_char_at_range (const location &loc,
		      lexer_test& test,
		      location_t strloc, enum cpp_ttype type, int idx,
		      int expected_line, int expected_start_col,
		      int expected_finish_col)
{
  cpp_reader *pfile = test.m_parser;
  string_concat_db *concats = &test.m_concats;

  source_range actual_range;
  const char *err
    = get_source_range_for_char (pfile, concats, strloc, type, idx,
				 &actual_range);
  if (should_have_column_data_p (strloc))
    ASSERT_EQ_AT (loc, NULL, err);
  else
    {
      ASSERT_STREQ_AT (loc,
		       "range starts after LINE_MAP_MAX_LOCATION_WITH_COLS",
		       err);
      return;
    }

  int actual_start_line = LOCATION_LINE (actual_range.m_start);
  ASSERT_EQ_AT (loc, expected_line, actual_start_line);
  int actual_finish_line = LOCATION_LINE (actual_range.m_finish);
  ASSERT_EQ_AT (loc, expected_line, actual_finish_line);

  if (should_have_column_data_p (actual_range.m_start))
    {
      int actual_start_col = LOCATION_COLUMN (actual_range.m_start);
      ASSERT_EQ_AT (loc, expected_start_col, actual_start_col);
    }
  if (should_have_column_data_p (actual_range.m_finish))
    {
      int actual_finish_col = LOCATION_COLUMN (actual_range.m_finish);
      ASSERT_EQ_AT (loc, expected_finish_col, actual_finish_col);
    }
}

/* Macro for calling assert_char_at_range, supplying SELFTEST_LOCATION for
   the effective location of any errors.  */

#define ASSERT_CHAR_AT_RANGE(LEXER_TEST, STRLOC, TYPE, IDX, EXPECTED_LINE, \
			     EXPECTED_START_COL, EXPECTED_FINISH_COL)	\
  assert_char_at_range (SELFTEST_LOCATION, (LEXER_TEST), (STRLOC), (TYPE), \
			(IDX), (EXPECTED_LINE), (EXPECTED_START_COL), \
			(EXPECTED_FINISH_COL))

/* Verify get_num_source_ranges_for_substring for token(s) at STRLOC,
   using the string concatenation database for TEST.

   Assert that the token(s) at STRLOC contain EXPECTED_NUM_RANGES.  */

static void
assert_num_substring_ranges (const location &loc,
			     lexer_test& test,
			     location_t strloc,
			     enum cpp_ttype type,
			     int expected_num_ranges)
{
  cpp_reader *pfile = test.m_parser;
  string_concat_db *concats = &test.m_concats;

  int actual_num_ranges = -1;
  const char *err
    = get_num_source_ranges_for_substring (pfile, concats, strloc, type,
					   &actual_num_ranges);
  if (should_have_column_data_p (strloc))
    ASSERT_EQ_AT (loc, NULL, err);
  else
    {
      ASSERT_STREQ_AT (loc,
		       "range starts after LINE_MAP_MAX_LOCATION_WITH_COLS",
		       err);
      return;
    }
  ASSERT_EQ_AT (loc, expected_num_ranges, actual_num_ranges);
}

/* Macro for calling assert_num_substring_ranges, supplying
   SELFTEST_LOCATION for the effective location of any errors.  */

#define ASSERT_NUM_SUBSTRING_RANGES(LEXER_TEST, STRLOC, TYPE, \
				    EXPECTED_NUM_RANGES)		\
  assert_num_substring_ranges (SELFTEST_LOCATION, (LEXER_TEST), (STRLOC), \
			       (TYPE), (EXPECTED_NUM_RANGES))


/* Verify that get_num_source_ranges_for_substring for token(s) at STRLOC
   returns an error (using the string concatenation database for TEST).  */

static void
assert_has_no_substring_ranges (const location &loc,
				lexer_test& test,
				location_t strloc,
				enum cpp_ttype type,
				const char *expected_err)
{
  cpp_reader *pfile = test.m_parser;
  string_concat_db *concats = &test.m_concats;
  cpp_substring_ranges ranges;
  const char *actual_err
    = get_substring_ranges_for_loc (pfile, concats, strloc,
				    type, ranges);
  if (should_have_column_data_p (strloc))
    ASSERT_STREQ_AT (loc, expected_err, actual_err);
  else
    ASSERT_STREQ_AT (loc,
		     "range starts after LINE_MAP_MAX_LOCATION_WITH_COLS",
		     actual_err);
}

#define ASSERT_HAS_NO_SUBSTRING_RANGES(LEXER_TEST, STRLOC, TYPE, ERR)    \
    assert_has_no_substring_ranges (SELFTEST_LOCATION, (LEXER_TEST), \
				    (STRLOC), (TYPE), (ERR))

/* Lex a simple string literal.  Verify the substring location data, before
   and after running cpp_interpret_string on it.  */

static void
test_lexer_string_locations_simple (const line_table_case &case_)
{
  /* Digits 0-9 (with 0 at column 10), the simple way.
     ....................000000000.11111111112.2222222223333333333
     ....................123456789.01234567890.1234567890123456789
     We add a trailing comment to ensure that we correctly locate
     the end of the string literal token.  */
  const char *content = "        \"0123456789\" /* not a string */\n";
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "\"0123456789\"");
  ASSERT_TOKEN_LOC_EQ (tok, test.m_tempfile.get_filename (), 1, 9, 20);

  /* At this point in lexing, the quote characters are treated as part of
     the string (they are stripped off by cpp_interpret_string).  */

  ASSERT_EQ (tok->val.str.len, 12);

  /* Verify that cpp_interpret_string works.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ ("0123456789", (const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Verify ranges of individual characters.  This no longer includes the
     opening quote, but does include the closing quote.  */
  for (int i = 0; i <= 10; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1,
			  10 + i, 10 + i);

  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, type, 11);
}

/* As test_lexer_string_locations_simple, but use an EBCDIC execution
   encoding.  */

static void
test_lexer_string_locations_ebcdic (const line_table_case &case_)
{
  /* EBCDIC support requires iconv.  */
  if (!HAVE_ICONV)
    return;

  /* Digits 0-9 (with 0 at column 10), the simple way.
     ....................000000000.11111111112.2222222223333333333
     ....................123456789.01234567890.1234567890123456789
     We add a trailing comment to ensure that we correctly locate
     the end of the string literal token.  */
  const char *content = "        \"0123456789\" /* not a string */\n";
  ebcdic_execution_charset use_ebcdic;
  lexer_test test (case_, content, &use_ebcdic);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "\"0123456789\"");
  ASSERT_TOKEN_LOC_EQ (tok, test.m_tempfile.get_filename (), 1, 9, 20);

  /* At this point in lexing, the quote characters are treated as part of
     the string (they are stripped off by cpp_interpret_string).  */

  ASSERT_EQ (tok->val.str.len, 12);

  /* The remainder of the test requires an iconv implementation that
     can convert from UTF-8 to the EBCDIC encoding requested above.  */
  if (use_ebcdic.iconv_errors_occurred_p ())
    return;

  /* Verify that cpp_interpret_string works.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);
  /* We should now have EBCDIC-encoded text, specifically
     IBM1047-encoded (aka "EBCDIC 1047", or "Code page 1047").
     The digits 0-9 are encoded as 240-249 i.e. 0xf0-0xf9.  */
  ASSERT_STREQ ("\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9",
		(const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Verify that we don't attempt to record substring location information
     for such cases.  */
  ASSERT_HAS_NO_SUBSTRING_RANGES
    (test, tok->src_loc, type,
     "execution character set != source character set");
}

/* Lex a string literal containing a hex-escaped character.
   Verify the substring location data, before and after running
   cpp_interpret_string on it.  */

static void
test_lexer_string_locations_hex (const line_table_case &case_)
{
  /* Digits 0-9, expressing digit 5 in ASCII as "\x35"
     and with a space in place of digit 6, to terminate the escaped
     hex code.
     ....................000000000.111111.11112222.
     ....................123456789.012345.67890123.  */
  const char *content = "        \"01234\\x35 789\"\n";
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "\"01234\\x35 789\"");
  ASSERT_TOKEN_LOC_EQ (tok, test.m_tempfile.get_filename (), 1, 9, 23);

  /* At this point in lexing, the quote characters are treated as part of
     the string (they are stripped off by cpp_interpret_string).  */
  ASSERT_EQ (tok->val.str.len, 15);

  /* Verify that cpp_interpret_string works.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ ("012345 789", (const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Verify ranges of individual characters.  This no longer includes the
     opening quote, but does include the closing quote.  */
  for (int i = 0; i <= 4; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 10 + i, 10 + i);
  ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, 5, 1, 15, 18);
  for (int i = 6; i <= 10; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 13 + i, 13 + i);

  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, type, 11);
}

/* Lex a string literal containing an octal-escaped character.
   Verify the substring location data after running cpp_interpret_string
   on it.  */

static void
test_lexer_string_locations_oct (const line_table_case &case_)
{
  /* Digits 0-9, expressing digit 5 in ASCII as "\065"
     and with a space in place of digit 6, to terminate the escaped
     octal code.
     ....................000000000.111111.11112222.2222223333333333444
     ....................123456789.012345.67890123.4567890123456789012  */
  const char *content = "        \"01234\\065 789\" /* not a string */\n";
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "\"01234\\065 789\"");

  /* Verify that cpp_interpret_string works.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ ("012345 789", (const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Verify ranges of individual characters.  This no longer includes the
     opening quote, but does include the closing quote.  */
  for (int i = 0; i < 5; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 10 + i, 10 + i);
  ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, 5, 1, 15, 18);
  for (int i = 6; i <= 10; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 13 + i, 13 + i);

  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, type, 11);
}

/* Test of string literal containing letter escapes.  */

static void
test_lexer_string_locations_letter_escape_1 (const line_table_case &case_)
{
  /* The string "\tfoo\\\nbar" i.e. tab, "foo", backslash, newline, bar.
     .....................000000000.1.11111.1.1.11222.22222223333333
     .....................123456789.0.12345.6.7.89012.34567890123456.  */
  const char *content = ("        \"\\tfoo\\\\\\nbar\" /* non-str */\n");
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected tokens back.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "\"\\tfoo\\\\\\nbar\"");

  /* Verify ranges of individual characters. */
  /* "\t".  */
  ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			0, 1, 10, 11);
  /* "foo". */
  for (int i = 1; i <= 3; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			  i, 1, 11 + i, 11 + i);
  /* "\\" and "\n".  */
  ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			4, 1, 15, 16);
  ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			5, 1, 17, 18);

  /* "bar" and closing quote for nul-terminator.  */
  for (int i = 6; i <= 9; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			  i, 1, 13 + i, 13 + i);

  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, CPP_STRING, 10);
}

/* Another test of a string literal containing a letter escape.
   Based on string seen in
     printf ("%-%\n");
   in gcc.dg/format/c90-printf-1.c.  */

static void
test_lexer_string_locations_letter_escape_2 (const line_table_case &case_)
{
  /* .....................000000000.1111.11.1111.22222222223.
     .....................123456789.0123.45.6789.01234567890.  */
  const char *content = ("        \"%-%\\n\" /* non-str */\n");
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected tokens back.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "\"%-%\\n\"");

  /* Verify ranges of individual characters. */
  /* "%-%".  */
  for (int i = 0; i < 3; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			  i, 1, 10 + i, 10 + i);
  /* "\n".  */
  ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			3, 1, 13, 14);

  /* Closing quote for nul-terminator.  */
  ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			4, 1, 15, 15);

  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, CPP_STRING, 5);
}

/* Lex a string literal containing UCN 4 characters.
   Verify the substring location data after running cpp_interpret_string
   on it.  */

static void
test_lexer_string_locations_ucn4 (const line_table_case &case_)
{
  /* Digits 0-9, expressing digits 5 and 6 as Roman numerals expressed
     as UCN 4.
     ....................000000000.111111.111122.222222223.33333333344444
     ....................123456789.012345.678901.234567890.12345678901234  */
  const char *content = "        \"01234\\u2174\\u2175789\" /* non-str */\n";
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "\"01234\\u2174\\u2175789\"");

  /* Verify that cpp_interpret_string works.
     The string should be encoded in the execution character
     set.  Assuming that that is UTF-8, we should have the following:
     -----------  ----  -----  -------  ----------------
     Byte offset  Byte  Octal  Unicode  Source Column(s)
     -----------  ----  -----  -------  ----------------
     0            0x30         '0'      10
     1            0x31         '1'      11
     2            0x32         '2'      12
     3            0x33         '3'      13
     4            0x34         '4'      14
     5            0xE2  \342   U+2174   15-20
     6            0x85  \205    (cont)  15-20
     7            0xB4  \264    (cont)  15-20
     8            0xE2  \342   U+2175   21-26
     9            0x85  \205    (cont)  21-26
     10           0xB5  \265    (cont)  21-26
     11           0x37         '7'      27
     12           0x38         '8'      28
     13           0x39         '9'      29
     14           0x00                  30 (closing quote)
     -----------  ----  -----  -------  ---------------.  */

  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ ("01234\342\205\264\342\205\265789",
		(const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Verify ranges of individual characters.  This no longer includes the
     opening quote, but does include the closing quote.
     '01234'.  */
  for (int i = 0; i <= 4; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 10 + i, 10 + i);
  /* U+2174.  */
  for (int i = 5; i <= 7; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 15, 20);
  /* U+2175.  */
  for (int i = 8; i <= 10; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 21, 26);
  /* '789' and nul-terminator.  */
  for (int i = 11; i <= 14; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 16 + i, 16 + i);

  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, type, 15);
}

/* Lex a string literal containing UCN 8 characters.
   Verify the substring location data after running cpp_interpret_string
   on it.  */

static void
test_lexer_string_locations_ucn8 (const line_table_case &case_)
{
  /* Digits 0-9, expressing digits 5 and 6 as Roman numerals as UCN 8.
     ....................000000000.111111.1111222222.2222333333333.344444
     ....................123456789.012345.6789012345.6789012345678.901234  */
  const char *content = "        \"01234\\U00002174\\U00002175789\" /* */\n";
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok,
			   "\"01234\\U00002174\\U00002175789\"");

  /* Verify that cpp_interpret_string works.
     The UTF-8 encoding of the string is identical to that from
     the ucn4 testcase above; the only difference is the column
     locations.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ ("01234\342\205\264\342\205\265789",
		(const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Verify ranges of individual characters.  This no longer includes the
     opening quote, but does include the closing quote.
     '01234'.  */
  for (int i = 0; i <= 4; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 10 + i, 10 + i);
  /* U+2174.  */
  for (int i = 5; i <= 7; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 15, 24);
  /* U+2175.  */
  for (int i = 8; i <= 10; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 25, 34);
  /* '789' at columns 35-37  */
  for (int i = 11; i <= 13; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 24 + i, 24 + i);
  /* Closing quote/nul-terminator at column 38.  */
  ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, 14, 1, 38, 38);

  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, type, 15);
}

/* Fetch a big-endian 32-bit value and convert to host endianness.  */

static uint32_t
uint32_from_big_endian (const uint32_t *ptr_be_value)
{
  const unsigned char *buf = (const unsigned char *)ptr_be_value;
  return (((uint32_t) buf[0] << 24)
	  | ((uint32_t) buf[1] << 16)
	  | ((uint32_t) buf[2] << 8)
	  | (uint32_t) buf[3]);
}
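
/* For example, the UTF-32BE encoding of '0' (U+0030) is the byte sequence
   00 00 00 30, which the helper above converts to the host value 0x30
   regardless of host byte order.  */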

/* Lex a wide string literal and verify that attempts to read substring
   location data from it fail gracefully.  */

static void
test_lexer_string_locations_wide_string (const line_table_case &case_)
{
  /* Digits 0-9.
     ....................000000000.11111111112.22222222233333
     ....................123456789.01234567890.12345678901234  */
  const char *content = "       L\"0123456789\" /* non-str */\n";
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_WSTRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "L\"0123456789\"");

  /* Verify that cpp_interpret_string works, using CPP_WSTRING.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_WSTRING;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);
  /* The cpp_reader defaults to big-endian with
     CHAR_BIT * sizeof (int) for the wchar_precision, so dst_string should
     now be encoded as UTF-32BE.  */
  const uint32_t *be32_chars = (const uint32_t *)dst_string.text;
  ASSERT_EQ ('0', uint32_from_big_endian (&be32_chars[0]));
  ASSERT_EQ ('5', uint32_from_big_endian (&be32_chars[5]));
  ASSERT_EQ ('9', uint32_from_big_endian (&be32_chars[9]));
  ASSERT_EQ (0, uint32_from_big_endian (&be32_chars[10]));
  free (const_cast <unsigned char *> (dst_string.text));

  /* We don't yet support generating substring location information
     for L"" strings.  */
  ASSERT_HAS_NO_SUBSTRING_RANGES
    (test, tok->src_loc, type,
     "execution character set != source character set");
}

/* Fetch a big-endian 16-bit value and convert to host endianness.  */

static uint16_t
uint16_from_big_endian (const uint16_t *ptr_be_value)
{
  const unsigned char *buf = (const unsigned char *)ptr_be_value;
  return ((uint16_t) buf[0] << 8) | (uint16_t) buf[1];
}

/* Lex a u"" string literal and verify that attempts to read substring
   location data from it fail gracefully.  */

static void
test_lexer_string_locations_string16 (const line_table_case &case_)
{
  /* Digits 0-9.
     ....................000000000.11111111112.22222222233333
     ....................123456789.01234567890.12345678901234  */
  const char *content = "       u\"0123456789\" /* non-str */\n";
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING16);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "u\"0123456789\"");

  /* Verify that cpp_interpret_string works, using CPP_STRING16.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING16;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);

  /* The cpp_reader defaults to big-endian, so dst_string should
     now be encoded as UTF-16BE.  */
  const uint16_t *be16_chars = (const uint16_t *)dst_string.text;
  ASSERT_EQ ('0', uint16_from_big_endian (&be16_chars[0]));
  ASSERT_EQ ('5', uint16_from_big_endian (&be16_chars[5]));
  ASSERT_EQ ('9', uint16_from_big_endian (&be16_chars[9]));
  ASSERT_EQ (0, uint16_from_big_endian (&be16_chars[10]));
  free (const_cast <unsigned char *> (dst_string.text));

  /* We don't yet support generating substring location information
     for L"" strings.  */
  ASSERT_HAS_NO_SUBSTRING_RANGES
    (test, tok->src_loc, type,
     "execution character set != source character set");
}

/* Lex a U"" string literal and verify that attempts to read substring
   location data from it fail gracefully.  */

static void
test_lexer_string_locations_string32 (const line_table_case &case_)
{
  /* Digits 0-9.
     ....................000000000.11111111112.22222222233333
     ....................123456789.01234567890.12345678901234  */
  const char *content = "       U\"0123456789\" /* non-str */\n";
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING32);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "U\"0123456789\"");

  /* Verify that cpp_interpret_string works, using CPP_STRING32.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING32;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);

  /* The cpp_reader defaults to big-endian, so dst_string should
     now be encoded as UTF-32BE.  */
  const uint32_t *be32_chars = (const uint32_t *)dst_string.text;
  ASSERT_EQ ('0', uint32_from_big_endian (&be32_chars[0]));
  ASSERT_EQ ('5', uint32_from_big_endian (&be32_chars[5]));
  ASSERT_EQ ('9', uint32_from_big_endian (&be32_chars[9]));
  ASSERT_EQ (0, uint32_from_big_endian (&be32_chars[10]));
  free (const_cast <unsigned char *> (dst_string.text));

  /* We don't yet support generating substring location information
     for L"" strings.  */
  ASSERT_HAS_NO_SUBSTRING_RANGES
    (test, tok->src_loc, type,
     "execution character set != source character set");
}

/* Lex a u8-string literal.
   Verify the substring location data after running cpp_interpret_string
   on it.  */

static void
test_lexer_string_locations_u8 (const line_table_case &case_)
{
  /* Digits 0-9.
     ....................000000000.11111111112.22222222233333
     ....................123456789.01234567890.12345678901234  */
  const char *content = "      u8\"0123456789\" /* non-str */\n";
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_UTF8STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "u8\"0123456789\"");

  /* Verify that cpp_interpret_string works.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ ("0123456789", (const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Verify ranges of individual characters.  This no longer includes the
     opening quote, but does include the closing quote.  */
  for (int i = 0; i <= 10; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 10 + i, 10 + i);
}

/* Lex a string literal containing UTF-8 source characters.
   Verify the substring location data after running cpp_interpret_string
   on it.  */

static void
test_lexer_string_locations_utf8_source (const line_table_case &case_)
{
 /* This string literal is written out to the source file as UTF-8,
    and is of the form "before mojibake after", where "mojibake"
    is written as the following four unicode code points:
       U+6587 CJK UNIFIED IDEOGRAPH-6587
       U+5B57 CJK UNIFIED IDEOGRAPH-5B57
       U+5316 CJK UNIFIED IDEOGRAPH-5316
       U+3051 HIRAGANA LETTER KE.
     Each of these is 3 bytes wide when encoded in UTF-8, whereas the
     "before" and "after" are 1 byte per unicode character.

     The numbers shown are "columns", which are *byte* numbers within
     the line, rather than unicode character numbers.

     .................... 000000000.1111111.
     .................... 123456789.0123456.  */
  const char *content = ("        \"before "
			 /* U+6587 CJK UNIFIED IDEOGRAPH-6587
			      UTF-8: 0xE6 0x96 0x87
			      C octal escaped UTF-8: \346\226\207
			    "column" numbers: 17-19.  */
			 "\346\226\207"

			 /* U+5B57 CJK UNIFIED IDEOGRAPH-5B57
			      UTF-8: 0xE5 0xAD 0x97
			      C octal escaped UTF-8: \345\255\227
			    "column" numbers: 20-22.  */
			 "\345\255\227"

			 /* U+5316 CJK UNIFIED IDEOGRAPH-5316
			      UTF-8: 0xE5 0x8C 0x96
			      C octal escaped UTF-8: \345\214\226
			    "column" numbers: 23-25.  */
			 "\345\214\226"

			 /* U+3051 HIRAGANA LETTER KE
			      UTF-8: 0xE3 0x81 0x91
			      C octal escaped UTF-8: \343\201\221
			    "column" numbers: 26-28.  */
			 "\343\201\221"

			 /* column numbers 29 onwards
			  2333333.33334444444444
			  9012345.67890123456789. */
			 " after\" /* non-str */\n");
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back, with the correct
     location information.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ
    (test.m_parser, tok,
     "\"before \346\226\207\345\255\227\345\214\226\343\201\221 after\"");

  /* Verify that cpp_interpret_string works.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser, &tok->val.str, 1,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ
    ("before \346\226\207\345\255\227\345\214\226\343\201\221 after",
     (const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Verify ranges of individual characters.  This no longer includes the
     opening quote, but does include the closing quote.
     Assuming that both source and execution encodings are UTF-8, we have
     a run of 25 octets in each, plus the NUL terminator.  */
  for (int i = 0; i < 25; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, i, 1, 10 + i, 10 + i);
  /* NUL-terminator should use the closing quote at column 35.  */
  ASSERT_CHAR_AT_RANGE (test, tok->src_loc, type, 25, 1, 35, 35);

  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, type, 26);
}

/* Test of string literal concatenation.  */

static void
test_lexer_string_locations_concatenation_1 (const line_table_case &case_)
{
  /* Digits 0-9.
     .....................000000000.111111.11112222222222
     .....................123456789.012345.67890123456789.  */
  const char *content = ("        \"01234\" /* non-str */\n"
			 "        \"56789\" /* non-str */\n");
  lexer_test test (case_, content, NULL);

  location_t input_locs[2];

  /* Verify that we get the expected tokens back.  */
  auto_vec <cpp_string> input_strings;
  const cpp_token *tok_a = test.get_token ();
  ASSERT_EQ (tok_a->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok_a, "\"01234\"");
  input_strings.safe_push (tok_a->val.str);
  input_locs[0] = tok_a->src_loc;

  const cpp_token *tok_b = test.get_token ();
  ASSERT_EQ (tok_b->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok_b, "\"56789\"");
  input_strings.safe_push (tok_b->val.str);
  input_locs[1] = tok_b->src_loc;

  /* Verify that cpp_interpret_string works.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser,
				      input_strings.address (), 2,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ ("0123456789", (const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Simulate c-lex.c's lex_string in order to record concatenation.  */
  test.m_concats.record_string_concatenation (2, input_locs);

  location_t initial_loc = input_locs[0];

  /* "01234" on line 1.  */
  for (int i = 0; i <= 4; i++)
    ASSERT_CHAR_AT_RANGE (test, initial_loc, type, i, 1, 10 + i, 10 + i);
  /* "56789" in line 2, plus its closing quote for the nul terminator.  */
  for (int i = 5; i <= 10; i++)
    ASSERT_CHAR_AT_RANGE (test, initial_loc, type, i, 2, 5 + i, 5 + i);

  ASSERT_NUM_SUBSTRING_RANGES (test, initial_loc, type, 11);
}

/* Another test of string literal concatenation.  */

static void
test_lexer_string_locations_concatenation_2 (const line_table_case &case_)
{
  /* Digits 0-9.
     .....................000000000.111.11111112222222
     .....................123456789.012.34567890123456.  */
  const char *content = ("        \"01\" /* non-str */\n"
			 "        \"23\" /* non-str */\n"
			 "        \"45\" /* non-str */\n"
			 "        \"67\" /* non-str */\n"
			 "        \"89\" /* non-str */\n");
  lexer_test test (case_, content, NULL);

  auto_vec <cpp_string> input_strings;
  location_t input_locs[5];

  /* Verify that we get the expected tokens back.  */
  for (int i = 0; i < 5; i++)
    {
      const cpp_token *tok = test.get_token ();
      ASSERT_EQ (tok->type, CPP_STRING);
      input_strings.safe_push (tok->val.str);
      input_locs[i] = tok->src_loc;
    }

  /* Verify that cpp_interpret_string works.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser,
				      input_strings.address (), 5,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ ("0123456789", (const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Simulate c-lex.c's lex_string in order to record concatenation.  */
  test.m_concats.record_string_concatenation (5, input_locs);

  location_t initial_loc = input_locs[0];

  /* Within ASSERT_CHAR_AT_RANGE (actually assert_char_at_range), we can
     detect if the initial loc is after LINE_MAP_MAX_LOCATION_WITH_COLS
     and expect get_source_range_for_char to fail.
     However, for a string concatenation test, we can have a case
     where the initial string is fully before LINE_MAP_MAX_LOCATION_WITH_COLS,
     but subsequent strings can be after it.
     Attempting to detect this within assert_char_at_range
     would overcomplicate the logic for the common test cases, so
     we detect it here.  */
  if (should_have_column_data_p (input_locs[0])
      && !should_have_column_data_p (input_locs[4]))
    {
      /* Verify that get_source_range_for_char gracefully rejects
	 this case.  */
      source_range actual_range;
      const char *err
	= get_source_range_for_char (test.m_parser, &test.m_concats,
				     initial_loc, type, 0, &actual_range);
      ASSERT_STREQ ("range starts after LINE_MAP_MAX_LOCATION_WITH_COLS", err);
      return;
    }

  for (int i = 0; i < 5; i++)
    for (int j = 0; j < 2; j++)
      ASSERT_CHAR_AT_RANGE (test, initial_loc, type, (i * 2) + j,
			    i + 1, 10 + j, 10 + j);

  /* NUL-terminator should use the final closing quote at line 5 column 12.  */
  ASSERT_CHAR_AT_RANGE (test, initial_loc, type, 10, 5, 12, 12);

  ASSERT_NUM_SUBSTRING_RANGES (test, initial_loc, type, 11);
}

/* Another test of string literal concatenation, this time combined with
   various kinds of escaped characters.  */

static void
test_lexer_string_locations_concatenation_3 (const line_table_case &case_)
{
  /* Digits 0-9, expressing digit 5 in ASCII as hex "\x35"
     digit 6 in ASCII as octal "\066", concatenating multiple strings.  */
  const char *content
    /* .000000000.111111.111.1.2222.222.2.2233.333.3333.34444444444555
       .123456789.012345.678.9.0123.456.7.8901.234.5678.90123456789012. */
    = ("        \"01234\"  \"\\x35\"  \"\\066\"  \"789\" /* non-str */\n");
  lexer_test test (case_, content, NULL);

  auto_vec <cpp_string> input_strings;
  location_t input_locs[4];

  /* Verify that we get the expected tokens back.  */
  for (int i = 0; i < 4; i++)
    {
      const cpp_token *tok = test.get_token ();
      ASSERT_EQ (tok->type, CPP_STRING);
      input_strings.safe_push (tok->val.str);
      input_locs[i] = tok->src_loc;
    }

  /* Verify that cpp_interpret_string works.  */
  cpp_string dst_string;
  const enum cpp_ttype type = CPP_STRING;
  bool result = cpp_interpret_string (test.m_parser,
				      input_strings.address (), 4,
				      &dst_string, type);
  ASSERT_TRUE (result);
  ASSERT_STREQ ("0123456789", (const char *)dst_string.text);
  free (const_cast <unsigned char *> (dst_string.text));

  /* Simulate c-lex.c's lex_string in order to record concatenation.  */
  test.m_concats.record_string_concatenation (4, input_locs);

  location_t initial_loc = input_locs[0];

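  /* Verify the ranges of the individual characters: chars 0-4 come from
     "01234" (columns 10-14), char 5 from the hex escape "\x35" (columns
     19-22), char 6 from the octal escape "\066" (columns 27-30), and
     chars 7-9 from "789" (columns 35-37).  */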
  for (int i = 0; i <= 4; i++)
    ASSERT_CHAR_AT_RANGE (test, initial_loc, type, i, 1, 10 + i, 10 + i);
  ASSERT_CHAR_AT_RANGE (test, initial_loc, type, 5, 1, 19, 22);
  ASSERT_CHAR_AT_RANGE (test, initial_loc, type, 6, 1, 27, 30);
  for (int i = 7; i <= 9; i++)
    ASSERT_CHAR_AT_RANGE (test, initial_loc, type, i, 1, 28 + i, 28 + i);

  /* NUL-terminator should use the location of the final closing quote.  */
  ASSERT_CHAR_AT_RANGE (test, initial_loc, type, 10, 1, 38, 38);

  ASSERT_NUM_SUBSTRING_RANGES (test, initial_loc, type, 11);
}

/* Test of string literal in a macro.  */

static void
test_lexer_string_locations_macro (const line_table_case &case_)
{
  /* Digits 0-9.
     .....................0000000001111111111.22222222223.
     .....................1234567890123456789.01234567890.  */
  const char *content = ("#define MACRO     \"0123456789\" /* non-str */\n"
			 "  MACRO");
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected tokens back.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_PADDING);

  tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "\"0123456789\"");

  /* Verify ranges of individual characters.  We ought to
     see columns within the macro definition.  */
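  /* The string literal in the definition of MACRO starts at column 19
     of line 1, so the digits are at columns 20-29; index 10 is the
     implicit NUL terminator, which maps to the closing quote at
     column 30.  */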
  for (int i = 0; i <= 10; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			  i, 1, 20 + i, 20 + i);

  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, CPP_STRING, 11);

  tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_PADDING);
}

/* Test of stringification of a macro argument.  */

static void
test_lexer_string_locations_stringified_macro_argument
  (const line_table_case &case_)
{
  /* .....................000000000111111111122222222223.
     .....................123456789012345678901234567890.  */
  const char *content = ("#define MACRO(X) #X /* non-str */\n"
			 "MACRO(foo)\n");
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_PADDING);

  tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "\"foo\"");

  /* We don't support getting the location of a stringified macro
     argument.  Verify that it fails gracefully.  */
  ASSERT_HAS_NO_SUBSTRING_RANGES (test, tok->src_loc, CPP_STRING,
				  "cpp_interpret_string_1 failed");

  tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_PADDING);

  tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_PADDING);
}

/* Ensure that we fail gracefully if something attempts to pass
   in a location that isn't a string literal token.  Seen on this code:

     const char a[] = " %d ";
     __builtin_printf (a, 0.5);
                       ^

   when c-format.c erroneously used the indicated one-character
   location as the format string location, leading to a read past the
   end of a string buffer in cpp_interpret_string_1.  */

static void
test_lexer_string_locations_non_string (const line_table_case &case_)
{
  /* .....................000000000111111111122222222223.
     .....................123456789012345678901234567890.  */
  const char *content = ("         a\n");
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_NAME);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "a");

  /* At this point, libcpp is attempting to interpret the name as a
     string literal, despite it not starting with a quote.  We don't detect
     that, but we should at least fail gracefully.  */
  ASSERT_HAS_NO_SUBSTRING_RANGES (test, tok->src_loc, CPP_STRING,
				  "cpp_interpret_string_1 failed");
}

/* Ensure that we can read substring information for a token which
   starts in one linemap and ends in another.  Adapted from
   gcc.dg/cpp/pr69985.c.  */

static void
test_lexer_string_locations_long_line (const line_table_case &case_)
{
  /* .....................000000.000111111111
     .....................123456.789012345678.  */
  const char *content = ("/* A very long line, so that we start a new line map.  */\n"
			 "     \"0123456789012345678901234567890123456789"
			 "0123456789012345678901234567890123456789"
			 "0123456789012345678901234567890123456789"
			 "0123456789\"\n");

  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected token back.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_STRING);

  if (!should_have_column_data_p (line_table->highest_location))
    return;

  /* Verify ranges of individual characters.  */
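  /* The 131 ranges are the 130 characters of the literal plus the
     implicit NUL terminator; all of them are on line 2, starting at
     column 7.  */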
  ASSERT_NUM_SUBSTRING_RANGES (test, tok->src_loc, CPP_STRING, 131);
  for (int i = 0; i < 131; i++)
    ASSERT_CHAR_AT_RANGE (test, tok->src_loc, CPP_STRING,
			  i, 2, 7 + i, 7 + i);
}

/* Test of lexing char constants.  */

static void
test_lexer_char_constants (const line_table_case &case_)
{
  /* Various char constants.
     .....................0000000001111111111.22222222223.
     .....................1234567890123456789.01234567890.  */
  const char *content = ("         'a'\n"
			 "        u'a'\n"
			 "        U'a'\n"
			 "        L'a'\n"
			 "         'abc'\n");
  lexer_test test (case_, content, NULL);

  /* Verify that we get the expected tokens back.  */
  /* 'a'.  */
  const cpp_token *tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_CHAR);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "'a'");

  unsigned int chars_seen;
  int unsignedp;
  cppchar_t cc = cpp_interpret_charconst (test.m_parser, tok,
					  &chars_seen, &unsignedp);
  ASSERT_EQ (cc, 'a');
  ASSERT_EQ (chars_seen, 1);

  /* u'a'.  */
  tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_CHAR16);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "u'a'");

  /* U'a'.  */
  tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_CHAR32);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "U'a'");

  /* L'a'.  */
  tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_WCHAR);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "L'a'");

  /* 'abc' (c-char-sequence).  */
  tok = test.get_token ();
  ASSERT_EQ (tok->type, CPP_CHAR);
  ASSERT_TOKEN_AS_TEXT_EQ (test.m_parser, tok, "'abc'");
}

/* A table of interesting location_t values, giving one axis of our test
   matrix.  */

static const location_t boundary_locations[] = {
  /* Zero means "don't override the default values for a new line_table".  */
  0,

  /* An arbitrary non-zero value that isn't close to one of
     the boundary values below.  */
  0x10000,

  /* Values near LINE_MAP_MAX_LOCATION_WITH_PACKED_RANGES.  */
  LINE_MAP_MAX_LOCATION_WITH_PACKED_RANGES - 0x100,
  LINE_MAP_MAX_LOCATION_WITH_PACKED_RANGES - 1,
  LINE_MAP_MAX_LOCATION_WITH_PACKED_RANGES,
  LINE_MAP_MAX_LOCATION_WITH_PACKED_RANGES + 1,
  LINE_MAP_MAX_LOCATION_WITH_PACKED_RANGES + 0x100,

  /* Values near LINE_MAP_MAX_LOCATION_WITH_COLS.  */
  LINE_MAP_MAX_LOCATION_WITH_COLS - 0x100,
  LINE_MAP_MAX_LOCATION_WITH_COLS - 1,
  LINE_MAP_MAX_LOCATION_WITH_COLS,
  LINE_MAP_MAX_LOCATION_WITH_COLS + 1,
  LINE_MAP_MAX_LOCATION_WITH_COLS + 0x100,
};

/* Run TESTCASE multiple times, once for each case in our test matrix.  */

void
for_each_line_table_case (void (*testcase) (const line_table_case &))
{
  /* As noted above in the description of struct line_table_case,
     we want to explore a test matrix of interesting line_table
     situations, running various selftests for each case within the
     matrix.  */

  /* Run all tests with:
     (a) line_table->default_range_bits == 0, and
     (b) line_table->default_range_bits == 5.  */
  int num_cases_tested = 0;
  for (int default_range_bits = 0; default_range_bits <= 5;
       default_range_bits += 5)
    {
      /* ...and use each of the "interesting" location values as
	 the starting location within line_table.  */
      const int num_boundary_locations
	= sizeof (boundary_locations) / sizeof (boundary_locations[0]);
      for (int loc_idx = 0; loc_idx < num_boundary_locations; loc_idx++)
	{
	  line_table_case c (default_range_bits, boundary_locations[loc_idx]);

	  testcase (c);

	  num_cases_tested++;
	}
    }

  /* Verify that we fully covered the test matrix: 2 settings of
     default_range_bits times the 12 entries in boundary_locations.  */
  ASSERT_EQ (num_cases_tested, 2 * 12);
}

/* Run all of the selftests within this file.  */

void
input_c_tests ()
{
  test_should_have_column_data_p ();
  test_unknown_location ();
  test_builtins ();
  for_each_line_table_case (test_make_location_nonpure_range_endpoints);

  for_each_line_table_case (test_accessing_ordinary_linemaps);
  for_each_line_table_case (test_lexer);
  for_each_line_table_case (test_lexer_string_locations_simple);
  for_each_line_table_case (test_lexer_string_locations_ebcdic);
  for_each_line_table_case (test_lexer_string_locations_hex);
  for_each_line_table_case (test_lexer_string_locations_oct);
  for_each_line_table_case (test_lexer_string_locations_letter_escape_1);
  for_each_line_table_case (test_lexer_string_locations_letter_escape_2);
  for_each_line_table_case (test_lexer_string_locations_ucn4);
  for_each_line_table_case (test_lexer_string_locations_ucn8);
  for_each_line_table_case (test_lexer_string_locations_wide_string);
  for_each_line_table_case (test_lexer_string_locations_string16);
  for_each_line_table_case (test_lexer_string_locations_string32);
  for_each_line_table_case (test_lexer_string_locations_u8);
  for_each_line_table_case (test_lexer_string_locations_utf8_source);
  for_each_line_table_case (test_lexer_string_locations_concatenation_1);
  for_each_line_table_case (test_lexer_string_locations_concatenation_2);
  for_each_line_table_case (test_lexer_string_locations_concatenation_3);
  for_each_line_table_case (test_lexer_string_locations_macro);
  for_each_line_table_case (test_lexer_string_locations_stringified_macro_argument);
  for_each_line_table_case (test_lexer_string_locations_non_string);
  for_each_line_table_case (test_lexer_string_locations_long_line);
  for_each_line_table_case (test_lexer_char_constants);

  test_reading_source_line ();
}

} // namespace selftest

#endif /* CHECKING_P */