/* Language-dependent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.
   Hacked by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "cp-tree.h"
#include "flags.h"
#include "tree-inline.h"
#include "debug.h"
#include "convert.h"
#include "cgraph.h"
#include "splay-tree.h"
#include "hash-table.h"
#include "gimple-expr.h"
#include "gimplify.h"

static tree bot_manip (tree *, int *, void *);
static tree bot_replace (tree *, int *, void *);
static int list_hash_eq (const void *, const void *);
static hashval_t list_hash_pieces (tree, tree, tree);
static hashval_t list_hash (const void *);
static tree build_target_expr (tree, tree, tsubst_flags_t);
static tree count_trees_r (tree *, int *, void *);
static tree verify_stmt_tree_r (tree *, int *, void *);
static tree build_local_temp (tree);

static tree handle_java_interface_attribute (tree *, tree, tree, int, bool *);
static tree handle_com_interface_attribute (tree *, tree, tree, int, bool *);
static tree handle_init_priority_attribute (tree *, tree, tree, int, bool *);
static tree handle_abi_tag_attribute (tree *, tree, tree, int, bool *);

/* If REF is an lvalue, returns the kind of lvalue that REF is.
   Otherwise, returns clk_none.  */

cp_lvalue_kind
lvalue_kind (const_tree ref)
{
  cp_lvalue_kind op1_lvalue_kind = clk_none;
  cp_lvalue_kind op2_lvalue_kind = clk_none;

  /* Expressions of reference type are sometimes wrapped in
     INDIRECT_REFs.  INDIRECT_REFs are just internal compiler
     representation, not part of the language, so we have to look
     through them.  */
  if (REFERENCE_REF_P (ref))
    return lvalue_kind (TREE_OPERAND (ref, 0));

  if (TREE_TYPE (ref)
      && TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
    {
      /* unnamed rvalue references are rvalues */
      if (TYPE_REF_IS_RVALUE (TREE_TYPE (ref))
	  && TREE_CODE (ref) != PARM_DECL
	  && !VAR_P (ref)
	  && TREE_CODE (ref) != COMPONENT_REF
	  /* Functions are always lvalues.  */
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (ref))) != FUNCTION_TYPE)
	return clk_rvalueref;

      /* lvalue references and named rvalue references are lvalues.  */
      return clk_ordinary;
    }

  if (ref == current_class_ptr)
    return clk_none;

  switch (TREE_CODE (ref))
    {
    case SAVE_EXPR:
      return clk_none;
      /* preincrements and predecrements are valid lvals, provided
	 what they refer to are valid lvals.  */
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return lvalue_kind (TREE_OPERAND (ref, 0));

    case COMPONENT_REF:
      op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0));
      /* Look at the member designator.  */
      if (!op1_lvalue_kind)
	;
      else if (is_overloaded_fn (TREE_OPERAND (ref, 1)))
	/* The "field" can be a FUNCTION_DECL or an OVERLOAD in some
	   situations.  If we're seeing a COMPONENT_REF, it's a non-static
	   member, so it isn't an lvalue. */
	op1_lvalue_kind = clk_none;
      else if (TREE_CODE (TREE_OPERAND (ref, 1)) != FIELD_DECL)
	/* This can be IDENTIFIER_NODE in a template.  */;
      else if (DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1)))
	{
	  /* Clear the ordinary bit.  If this object was a class
	     rvalue we want to preserve that information.  */
	  op1_lvalue_kind &= ~clk_ordinary;
	  /* The lvalue is for a bitfield.  */
	  op1_lvalue_kind |= clk_bitfield;
	}
      else if (DECL_PACKED (TREE_OPERAND (ref, 1)))
	op1_lvalue_kind |= clk_packed;

      return op1_lvalue_kind;

    case STRING_CST:
    case COMPOUND_LITERAL_EXPR:
      return clk_ordinary;

    case CONST_DECL:
      /* CONST_DECL without TREE_STATIC are enumeration values and
	 thus not lvalues.  With TREE_STATIC they are used by ObjC++
	 in objc_build_string_object and need to be considered as
	 lvalues.  */
      if (! TREE_STATIC (ref))
	return clk_none;
    case VAR_DECL:
      if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
	  && DECL_LANG_SPECIFIC (ref)
	  && DECL_IN_AGGR_P (ref))
	return clk_none;
    case INDIRECT_REF:
    case ARROW_EXPR:
    case ARRAY_REF:
    case ARRAY_NOTATION_REF:
    case PARM_DECL:
    case RESULT_DECL:
      return clk_ordinary;

      /* A scope ref in a template, left as SCOPE_REF to support later
	 access checking.  */
    case SCOPE_REF:
      gcc_assert (!type_dependent_expression_p (CONST_CAST_TREE (ref)));
      {
	tree op = TREE_OPERAND (ref, 1);
	if (TREE_CODE (op) == FIELD_DECL)
	  return (DECL_C_BIT_FIELD (op) ? clk_bitfield : clk_ordinary);
	else
	  return lvalue_kind (op);
      }

    case MAX_EXPR:
    case MIN_EXPR:
      /* Disallow <? and >? as lvalues if either argument side-effects.  */
      if (TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 0))
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 1)))
	return clk_none;
      op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0));
      op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1));
      break;

    case COND_EXPR:
      op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1)
				    ? TREE_OPERAND (ref, 1)
				    : TREE_OPERAND (ref, 0));
      op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 2));
      break;

    case MODIFY_EXPR:
    case TYPEID_EXPR:
      return clk_ordinary;

    case COMPOUND_EXPR:
      return lvalue_kind (TREE_OPERAND (ref, 1));

    case TARGET_EXPR:
      return clk_class;

    case VA_ARG_EXPR:
      return (CLASS_TYPE_P (TREE_TYPE (ref)) ? clk_class : clk_none);

    case CALL_EXPR:
      /* We can see calls outside of TARGET_EXPR in templates.  */
      if (CLASS_TYPE_P (TREE_TYPE (ref)))
	return clk_class;
      return clk_none;

    case FUNCTION_DECL:
      /* All functions (except non-static-member functions) are
	 lvalues.  */
      return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref)
	      ? clk_none : clk_ordinary);

    case BASELINK:
      /* We now represent a reference to a single static member function
	 with a BASELINK.  */
      /* This CONST_CAST is okay because BASELINK_FUNCTIONS returns
	 its argument unmodified and we assign it to a const_tree.  */
      return lvalue_kind (BASELINK_FUNCTIONS (CONST_CAST_TREE (ref)));

    case NON_DEPENDENT_EXPR:
      /* We just return clk_ordinary for NON_DEPENDENT_EXPR in C++98, but
	 in C++11 lvalues don't bind to rvalue references, so we need to
	 work harder to avoid bogus errors (c++/44870).  */
      if (cxx_dialect < cxx11)
	return clk_ordinary;
      else
	return lvalue_kind (TREE_OPERAND (ref, 0));

    default:
      if (!TREE_TYPE (ref))
	return clk_none;
      if (CLASS_TYPE_P (TREE_TYPE (ref)))
	return clk_class;
      break;
    }

  /* If one operand is not an lvalue at all, then this expression is
     not an lvalue.  */
  if (!op1_lvalue_kind || !op2_lvalue_kind)
    return clk_none;

  /* Otherwise, it's an lvalue, and it has all the odd properties
     contributed by either operand.  */
  op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind;
  /* It's not an ordinary lvalue if it involves any other kind.  */
  if ((op1_lvalue_kind & ~clk_ordinary) != clk_none)
    op1_lvalue_kind &= ~clk_ordinary;
  /* It can't be both a pseudo-lvalue and a non-addressable lvalue.
     A COND_EXPR of those should be wrapped in a TARGET_EXPR.  */
  if ((op1_lvalue_kind & (clk_rvalueref|clk_class))
      && (op1_lvalue_kind & (clk_bitfield|clk_packed)))
    op1_lvalue_kind = clk_none;
  return op1_lvalue_kind;
}

/* Returns the kind of lvalue that REF is, in the sense of
   [basic.lval].  This function should really be named lvalue_p; it
   computes the C++ definition of lvalue.  */

cp_lvalue_kind
real_lvalue_p (const_tree ref)
{
  cp_lvalue_kind kind = lvalue_kind (ref);
  if (kind & (clk_rvalueref|clk_class))
    return clk_none;
  else
    return kind;
}

/* This differs from real_lvalue_p in that class rvalues are considered
   lvalues.  */

bool
lvalue_p (const_tree ref)
{
  return (lvalue_kind (ref) != clk_none);
}

/* This differs from real_lvalue_p in that rvalues formed by dereferencing
   rvalue references are considered rvalues.  */

bool
lvalue_or_rvalue_with_address_p (const_tree ref)
{
  cp_lvalue_kind kind = lvalue_kind (ref);
  if (kind & clk_class)
    return false;
  else
    return (kind != clk_none);
}

/* Returns true if REF is an xvalue, false otherwise.  */

bool
xvalue_p (const_tree ref)
{
  return (lvalue_kind (ref) == clk_rvalueref);
}
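
/* Editorial note, not part of the original sources: the clk_* values
   returned by lvalue_kind are bit flags and may be combined (for
   instance a packed or bit-field member keeps clk_packed/clk_bitfield
   while losing clk_ordinary).  A hypothetical caller might dispatch on
   the result roughly as:

     cp_lvalue_kind k = lvalue_kind (expr);
     if (k == clk_none)
       ;  // prvalue
     else if (k & clk_rvalueref)
       ;  // xvalue
     else
       ;  // lvalue, possibly with restricted addressability

   real_lvalue_p, lvalue_p, lvalue_or_rvalue_with_address_p and
   xvalue_p above collapse this bit-set into the yes/no answers their
   callers need.  */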

/* Test whether DECL is a builtin that may appear in a
   constant-expression. */

bool
builtin_valid_in_constant_expr_p (const_tree decl)
{
  /* At present BUILT_IN_CONSTANT_P is the only builtin we're allowing
     in constant-expressions.  We may want to add other builtins later. */
  return DECL_IS_BUILTIN_CONSTANT_P (decl);
}

/* Build a TARGET_EXPR, initializing the DECL with the VALUE.  */

static tree
build_target_expr (tree decl, tree value, tsubst_flags_t complain)
{
  tree t;
  tree type = TREE_TYPE (decl);

#ifdef ENABLE_CHECKING
  gcc_assert (VOID_TYPE_P (TREE_TYPE (value))
	      || TREE_TYPE (decl) == TREE_TYPE (value)
	      /* On ARM ctors return 'this'.  */
	      || (TYPE_PTR_P (TREE_TYPE (value))
		  && TREE_CODE (value) == CALL_EXPR)
	      || useless_type_conversion_p (TREE_TYPE (decl),
					    TREE_TYPE (value)));
#endif

  t = cxx_maybe_build_cleanup (decl, complain);
  if (t == error_mark_node)
    return error_mark_node;
  t = build4 (TARGET_EXPR, type, decl, value, t, NULL_TREE);
  /* We always set TREE_SIDE_EFFECTS so that expand_expr does not
     ignore the TARGET_EXPR.  If there really turn out to be no
     side-effects, then the optimizer should be able to get rid of
     whatever code is generated anyhow.  */
  TREE_SIDE_EFFECTS (t) = 1;

  return t;
}

/* Return an undeclared local temporary of type TYPE for use in building a
   TARGET_EXPR.  */

static tree
build_local_temp (tree type)
{
  tree slot = build_decl (input_location,
			  VAR_DECL, NULL_TREE, type);
  DECL_ARTIFICIAL (slot) = 1;
  DECL_IGNORED_P (slot) = 1;
  DECL_CONTEXT (slot) = current_function_decl;
  layout_decl (slot, 0);
  return slot;
}

/* Set various status flags when building an AGGR_INIT_EXPR object T.  */

static void
process_aggr_init_operands (tree t)
{
  bool side_effects;

  side_effects = TREE_SIDE_EFFECTS (t);
  if (!side_effects)
    {
      int i, n;
      n = TREE_OPERAND_LENGTH (t);
      for (i = 1; i < n; i++)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    {
	      side_effects = 1;
	      break;
	    }
	}
    }
  TREE_SIDE_EFFECTS (t) = side_effects;
}

/* Build an AGGR_INIT_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE,
   FN, and SLOT.  NARGS is the number of call arguments which are specified
   as a tree array ARGS.  */

static tree
build_aggr_init_array (tree return_type, tree fn, tree slot, int nargs,
		       tree *args)
{
  tree t;
  int i;

  t = build_vl_exp (AGGR_INIT_EXPR, nargs + 3);
  TREE_TYPE (t) = return_type;
  AGGR_INIT_EXPR_FN (t) = fn;
  AGGR_INIT_EXPR_SLOT (t) = slot;
  for (i = 0; i < nargs; i++)
    AGGR_INIT_EXPR_ARG (t, i) = args[i];
  process_aggr_init_operands (t);
  return t;
}

/* INIT is a CALL_EXPR or AGGR_INIT_EXPR which needs info about its
   target.  TYPE is the type to be initialized.

   Build an AGGR_INIT_EXPR to represent the initialization.  This function
   differs from build_cplus_new in that an AGGR_INIT_EXPR can only be used
   to initialize another object, whereas a TARGET_EXPR can either
   initialize another object or create its own temporary object, and as a
   result building up a TARGET_EXPR requires that the type's destructor be
   callable.  */

tree
build_aggr_init_expr (tree type, tree init)
{
  tree fn;
  tree slot;
  tree rval;
  int is_ctor;

  /* Don't build AGGR_INIT_EXPR in a template.  */
  if (processing_template_decl)
    return init;

  if (TREE_CODE (init) == CALL_EXPR)
    fn = CALL_EXPR_FN (init);
  else if (TREE_CODE (init) == AGGR_INIT_EXPR)
    fn = AGGR_INIT_EXPR_FN (init);
  else
    return convert (type, init);

  is_ctor = (TREE_CODE (fn) == ADDR_EXPR
	     && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	     && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0)));

  /* We split the CALL_EXPR into its function and its arguments here.
     Then, in expand_expr, we put them back together.  The reason for
     this is that this expression might be a default argument
     expression.  In that case, we need a new temporary every time the
     expression is used.  That's what break_out_target_exprs does; it
     replaces every AGGR_INIT_EXPR with a copy that uses a fresh
     temporary slot.  Then, expand_expr builds up a call-expression
     using the new slot.  */

  /* If we don't need to use a constructor to create an object of this
     type, don't mess with AGGR_INIT_EXPR.  */
  if (is_ctor || TREE_ADDRESSABLE (type))
    {
      slot = build_local_temp (type);

      if (TREE_CODE (init) == CALL_EXPR)
	rval = build_aggr_init_array (void_type_node, fn, slot,
				      call_expr_nargs (init),
				      CALL_EXPR_ARGP (init));
      else
	rval = build_aggr_init_array (void_type_node, fn, slot,
				      aggr_init_expr_nargs (init),
				      AGGR_INIT_EXPR_ARGP (init));
      TREE_SIDE_EFFECTS (rval) = 1;
      AGGR_INIT_VIA_CTOR_P (rval) = is_ctor;
      TREE_NOTHROW (rval) = TREE_NOTHROW (init);
    }
  else
    rval = init;

  return rval;
}

/* INIT is a CALL_EXPR or AGGR_INIT_EXPR which needs info about its
   target.  TYPE is the type that this initialization should appear to
   have.

   Build an encapsulation of the initialization to perform
   and return it so that it can be processed by language-independent
   and language-specific expression expanders.  */

tree
build_cplus_new (tree type, tree init, tsubst_flags_t complain)
{
  tree rval = build_aggr_init_expr (type, init);
  tree slot;

  if (!complete_type_or_maybe_complain (type, init, complain))
    return error_mark_node;

  /* Make sure that we're not trying to create an instance of an
     abstract class.  */
  if (abstract_virtuals_error_sfinae (NULL_TREE, type, complain))
    return error_mark_node;

  if (TREE_CODE (rval) == AGGR_INIT_EXPR)
    slot = AGGR_INIT_EXPR_SLOT (rval);
  else if (TREE_CODE (rval) == CALL_EXPR
	   || TREE_CODE (rval) == CONSTRUCTOR)
    slot = build_local_temp (type);
  else
    return rval;

  rval = build_target_expr (slot, rval, complain);

  if (rval != error_mark_node)
    TARGET_EXPR_IMPLICIT_P (rval) = 1;

  return rval;
}
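
/* Editorial sketch, not from the original sources: a typical caller
   wraps a constructor CALL_EXPR so that it initializes a temporary,
   e.g.

     tree temp = build_cplus_new (type, call, tf_warning_or_error);

   where CALL would come from something like build_special_member_call.
   The result is a TARGET_EXPR whose slot is a compiler temporary;
   code that only needs to initialize an already-existing object uses
   build_aggr_init_expr above instead, which does not require the
   type's destructor to be callable.  */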

/* Subroutine of build_vec_init_expr: Build up a single element
   initialization as a proxy for the full array initialization to get things
   marked as used and any appropriate diagnostics.

   Since we're deferring building the actual constructor calls until
   gimplification time, we need to build one now and throw it away so
   that the relevant constructor gets mark_used before cgraph decides
   what functions are needed.  Here we assume that init is either
   NULL_TREE, void_type_node (indicating value-initialization), or
   another array to copy.  */

static tree
build_vec_init_elt (tree type, tree init, tsubst_flags_t complain)
{
  tree inner_type = strip_array_types (type);
  vec<tree, va_gc> *argvec;

  if (integer_zerop (array_type_nelts_total (type))
      || !CLASS_TYPE_P (inner_type))
    /* No interesting initialization to do.  */
    return integer_zero_node;
  else if (init == void_type_node)
    return build_value_init (inner_type, complain);

  gcc_assert (init == NULL_TREE
	      || (same_type_ignoring_top_level_qualifiers_p
		  (type, TREE_TYPE (init))));

  argvec = make_tree_vector ();
  if (init)
    {
      tree init_type = strip_array_types (TREE_TYPE (init));
      tree dummy = build_dummy_object (init_type);
      if (!real_lvalue_p (init))
	dummy = move (dummy);
      argvec->quick_push (dummy);
    }
  init = build_special_member_call (NULL_TREE, complete_ctor_identifier,
				    &argvec, inner_type, LOOKUP_NORMAL,
				    complain);
  release_tree_vector (argvec);

  /* For a trivial constructor, build_over_call creates a TARGET_EXPR.  But
     we don't want one here because we aren't creating a temporary.  */
  if (TREE_CODE (init) == TARGET_EXPR)
    init = TARGET_EXPR_INITIAL (init);

  return init;
}

/* Return a TARGET_EXPR which expresses the initialization of an array to
   be named later, either default-initialization or copy-initialization
   from another array of the same type.  */

tree
build_vec_init_expr (tree type, tree init, tsubst_flags_t complain)
{
  tree slot;
  bool value_init = false;
  tree elt_init = build_vec_init_elt (type, init, complain);

  if (init == void_type_node)
    {
      value_init = true;
      init = NULL_TREE;
    }

  slot = build_local_temp (type);
  init = build2 (VEC_INIT_EXPR, type, slot, init);
  TREE_SIDE_EFFECTS (init) = true;
  SET_EXPR_LOCATION (init, input_location);

  if (cxx_dialect >= cxx11
      && potential_constant_expression (elt_init))
    VEC_INIT_EXPR_IS_CONSTEXPR (init) = true;
  VEC_INIT_EXPR_VALUE_INIT (init) = value_init;

  return init;
}

/* Give a helpful diagnostic for a non-constexpr VEC_INIT_EXPR in a context
   that requires a constant expression.  */

void
diagnose_non_constexpr_vec_init (tree expr)
{
  tree type = TREE_TYPE (VEC_INIT_EXPR_SLOT (expr));
  tree init, elt_init;
  if (VEC_INIT_EXPR_VALUE_INIT (expr))
    init = void_type_node;
  else
    init = VEC_INIT_EXPR_INIT (expr);

  elt_init = build_vec_init_elt (type, init, tf_warning_or_error);
  require_potential_constant_expression (elt_init);
}

tree
build_array_copy (tree init)
{
  return build_vec_init_expr (TREE_TYPE (init), init, tf_warning_or_error);
}

/* Build a TARGET_EXPR using INIT to initialize a new temporary of the
   indicated TYPE.  */

tree
build_target_expr_with_type (tree init, tree type, tsubst_flags_t complain)
{
  gcc_assert (!VOID_TYPE_P (type));

  if (TREE_CODE (init) == TARGET_EXPR
      || init == error_mark_node)
    return init;
  else if (CLASS_TYPE_P (type) && type_has_nontrivial_copy_init (type)
	   && !VOID_TYPE_P (TREE_TYPE (init))
	   && TREE_CODE (init) != COND_EXPR
	   && TREE_CODE (init) != CONSTRUCTOR
	   && TREE_CODE (init) != VA_ARG_EXPR)
    /* We need to build up a copy constructor call.  A void initializer
       means we're being called from bot_manip.  COND_EXPR is a special
       case because we already have copies on the arms and we don't want
       another one here.  A CONSTRUCTOR is aggregate initialization, which
       is handled separately.  A VA_ARG_EXPR is magic creation of an
       aggregate; there's no additional work to be done.  */
    return force_rvalue (init, complain);

  return force_target_expr (type, init, complain);
}

/* Like the above function, but without the checking.  This function should
   only be used by code which is deliberately trying to subvert the type
   system, such as call_builtin_trap.  Or build_over_call, to avoid
   infinite recursion.  */

tree
force_target_expr (tree type, tree init, tsubst_flags_t complain)
{
  tree slot;

  gcc_assert (!VOID_TYPE_P (type));

  slot = build_local_temp (type);
  return build_target_expr (slot, init, complain);
}

/* Like build_target_expr_with_type, but use the type of INIT.  */

tree
get_target_expr_sfinae (tree init, tsubst_flags_t complain)
{
  if (TREE_CODE (init) == AGGR_INIT_EXPR)
    return build_target_expr (AGGR_INIT_EXPR_SLOT (init), init, complain);
  else if (TREE_CODE (init) == VEC_INIT_EXPR)
    return build_target_expr (VEC_INIT_EXPR_SLOT (init), init, complain);
  else
    return build_target_expr_with_type (init, TREE_TYPE (init), complain);
}

tree
get_target_expr (tree init)
{
  return get_target_expr_sfinae (init, tf_warning_or_error);
}

/* If EXPR is a bitfield reference, convert it to the declared type of
   the bitfield, and return the resulting expression.  Otherwise,
   return EXPR itself.  */

tree
convert_bitfield_to_declared_type (tree expr)
{
  tree bitfield_type;

  bitfield_type = is_bitfield_expr_with_lowered_type (expr);
  if (bitfield_type)
    expr = convert_to_integer (TYPE_MAIN_VARIANT (bitfield_type),
			       expr);
  return expr;
}

/* EXPR is being used in an rvalue context.  Return a version of EXPR
   that is marked as an rvalue.  */

tree
rvalue (tree expr)
{
  tree type;

  if (error_operand_p (expr))
    return expr;

  expr = mark_rvalue_use (expr);

  /* [basic.lval]

     Non-class rvalues always have cv-unqualified types.  */
  type = TREE_TYPE (expr);
  if (!CLASS_TYPE_P (type) && cv_qualified_p (type))
    type = cv_unqualified (type);

  /* We need to do this for rvalue refs as well to get the right answer
     from decltype; see c++/36628.  */
  if (!processing_template_decl && lvalue_or_rvalue_with_address_p (expr))
    expr = build1 (NON_LVALUE_EXPR, type, expr);
  else if (type != TREE_TYPE (expr))
    expr = build_nop (type, expr);

  return expr;
}


/* Hash an ARRAY_TYPE.  K is really of type `tree'.  */

static hashval_t
cplus_array_hash (const void* k)
{
  hashval_t hash;
  const_tree const t = (const_tree) k;

  hash = TYPE_UID (TREE_TYPE (t));
  if (TYPE_DOMAIN (t))
    hash ^= TYPE_UID (TYPE_DOMAIN (t));
  return hash;
}

typedef struct cplus_array_info {
  tree type;
  tree domain;
} cplus_array_info;

/* Compare two ARRAY_TYPEs.  K1 is really of type `tree', K2 is really
   of type `cplus_array_info*'. */

static int
cplus_array_compare (const void * k1, const void * k2)
{
  const_tree const t1 = (const_tree) k1;
  const cplus_array_info *const t2 = (const cplus_array_info*) k2;

  return (TREE_TYPE (t1) == t2->type && TYPE_DOMAIN (t1) == t2->domain);
}

/* Hash table containing dependent array types, which are unsuitable for
   the language-independent type hash table.  */
static GTY ((param_is (union tree_node))) htab_t cplus_array_htab;

/* Like build_array_type, but handle special C++ semantics.  */

tree
build_cplus_array_type (tree elt_type, tree index_type)
{
  tree t;

  if (elt_type == error_mark_node || index_type == error_mark_node)
    return error_mark_node;

  if (processing_template_decl
      && (dependent_type_p (elt_type)
	  || (index_type && !TREE_CONSTANT (TYPE_MAX_VALUE (index_type)))))
    {
      void **e;
      cplus_array_info cai;
      hashval_t hash;

      if (cplus_array_htab == NULL)
	cplus_array_htab = htab_create_ggc (61, &cplus_array_hash,
					    &cplus_array_compare, NULL);
      
      hash = TYPE_UID (elt_type);
      if (index_type)
	hash ^= TYPE_UID (index_type);
      cai.type = elt_type;
      cai.domain = index_type;

      e = htab_find_slot_with_hash (cplus_array_htab, &cai, hash, INSERT); 
      if (*e)
	/* We have found the type: we're done.  */
	return (tree) *e;
      else
	{
	  /* Build a new array type.  */
	  t = cxx_make_type (ARRAY_TYPE);
	  TREE_TYPE (t) = elt_type;
	  TYPE_DOMAIN (t) = index_type;

	  /* Store it in the hash table. */
	  *e = t;

	  /* Set the canonical type for this new node.  */
	  if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	      || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
	    SET_TYPE_STRUCTURAL_EQUALITY (t);
	  else if (TYPE_CANONICAL (elt_type) != elt_type
		   || (index_type 
		       && TYPE_CANONICAL (index_type) != index_type))
	    TYPE_CANONICAL (t)
		= build_cplus_array_type 
		   (TYPE_CANONICAL (elt_type),
		    index_type ? TYPE_CANONICAL (index_type) : index_type);
	  else
	    TYPE_CANONICAL (t) = t;
	}
    }
  else
    {
      if (!TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  && !(index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  && (TYPE_CANONICAL (elt_type) != elt_type
	      || (index_type && TYPE_CANONICAL (index_type) != index_type)))
	/* Make sure that the canonical type is on the appropriate
	   variants list.  */
	build_cplus_array_type
	  (TYPE_CANONICAL (elt_type),
	   index_type ? TYPE_CANONICAL (index_type) : index_type);
      t = build_array_type (elt_type, index_type);
    }

  /* Push these needs up so that initialization takes place
     more easily.  */
  bool needs_ctor
    = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
  TYPE_NEEDS_CONSTRUCTING (t) = needs_ctor;
  bool needs_dtor
    = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
  TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t) = needs_dtor;

  /* We want TYPE_MAIN_VARIANT of an array to strip cv-quals from the
     element type as well, so fix it up if needed.  */
  if (elt_type != TYPE_MAIN_VARIANT (elt_type))
    {
      tree m = build_cplus_array_type (TYPE_MAIN_VARIANT (elt_type),
				       index_type);

      if (TYPE_MAIN_VARIANT (t) != m)
	{
	  if (COMPLETE_TYPE_P (TREE_TYPE (t)) && !COMPLETE_TYPE_P (m))
	    {
	      /* m was built before the element type was complete, so we
		 also need to copy the layout info from t.  We might
	         end up doing this multiple times if t is an array of
	         unknown bound.  */
	      tree size = TYPE_SIZE (t);
	      tree size_unit = TYPE_SIZE_UNIT (t);
	      unsigned int align = TYPE_ALIGN (t);
	      unsigned int user_align = TYPE_USER_ALIGN (t);
	      enum machine_mode mode = TYPE_MODE (t);
	      for (tree var = m; var; var = TYPE_NEXT_VARIANT (var))
		{
		  TYPE_SIZE (var) = size;
		  TYPE_SIZE_UNIT (var) = size_unit;
		  TYPE_ALIGN (var) = align;
		  TYPE_USER_ALIGN (var) = user_align;
		  SET_TYPE_MODE (var, mode);
		  TYPE_NEEDS_CONSTRUCTING (var) = needs_ctor;
		  TYPE_HAS_NONTRIVIAL_DESTRUCTOR (var) = needs_dtor;
		}
	    }

	  TYPE_MAIN_VARIANT (t) = m;
	  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
	  TYPE_NEXT_VARIANT (m) = t;
	}
    }

  /* Avoid spurious warnings with VLAs (c++/54583).  */
  if (TYPE_SIZE (t) && EXPR_P (TYPE_SIZE (t)))
    TREE_NO_WARNING (TYPE_SIZE (t)) = 1;

  return t;
}

/* Return an ARRAY_TYPE with element type ELT and length N.  */

tree
build_array_of_n_type (tree elt, int n)
{
  return build_cplus_array_type (elt, build_index_type (size_int (n - 1)));
}
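
/* Editorial example, not from the original sources: the call
   build_array_of_n_type (integer_type_node, 10) is shorthand for
   build_cplus_array_type (integer_type_node,
   build_index_type (size_int (9))), i.e. the C++ type int[10].  */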

/* True iff T is a C++1y array of runtime bound (VLA).  */

bool
array_of_runtime_bound_p (tree t)
{
  if (!t || TREE_CODE (t) != ARRAY_TYPE)
    return false;
  tree dom = TYPE_DOMAIN (t);
  if (!dom)
    return false;
  tree max = TYPE_MAX_VALUE (dom);
  return (!potential_rvalue_constant_expression (max)
	  || (!value_dependent_expression_p (max) && !TREE_CONSTANT (max)));
}

/* Return a reference type node referring to TO_TYPE.  If RVAL is
   true, return an rvalue reference type, otherwise return an lvalue
   reference type.  If a type node exists, reuse it, otherwise create
   a new one.  */
tree
cp_build_reference_type (tree to_type, bool rval)
{
  tree lvalue_ref, t;
  lvalue_ref = build_reference_type (to_type);
  if (!rval)
    return lvalue_ref;

  /* This code to create rvalue reference types is based on and tied
     to the code creating lvalue reference types in the middle-end
     functions build_reference_type_for_mode and build_reference_type.

     It works by putting the rvalue reference type nodes after the
     lvalue reference nodes in the TYPE_NEXT_REF_TO linked list, so
     they will effectively be ignored by the middle end.  */

  for (t = lvalue_ref; (t = TYPE_NEXT_REF_TO (t)); )
    if (TYPE_REF_IS_RVALUE (t))
      return t;

  t = build_distinct_type_copy (lvalue_ref);

  TYPE_REF_IS_RVALUE (t) = true;
  TYPE_NEXT_REF_TO (t) = TYPE_NEXT_REF_TO (lvalue_ref);
  TYPE_NEXT_REF_TO (lvalue_ref) = t;

  if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type)
    TYPE_CANONICAL (t) 
      = cp_build_reference_type (TYPE_CANONICAL (to_type), rval);
  else
    TYPE_CANONICAL (t) = t;

  layout_type (t);

  return t;

}
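
/* Editorial note, not from the original sources: for a given TO_TYPE,
   cp_build_reference_type (to_type, false) simply returns the middle
   end's lvalue reference node (T&), while
   cp_build_reference_type (to_type, true) returns, and on later calls
   reuses, the rvalue reference node (T&&) chained after it on the
   TYPE_NEXT_REF_TO list.  */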

/* Returns EXPR cast to rvalue reference type, like std::move.  */

tree
move (tree expr)
{
  tree type = TREE_TYPE (expr);
  gcc_assert (TREE_CODE (type) != REFERENCE_TYPE);
  type = cp_build_reference_type (type, /*rval*/true);
  return build_static_cast (type, expr, tf_warning_or_error);
}
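
/* Editorial note, not from the original sources: move (expr) is the
   front end's internal analogue of std::move, a static_cast of EXPR to
   an rvalue reference to its own type; build_vec_init_elt earlier in
   this file uses it to model moving from a dummy source object.  */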

/* Used by the C++ front end to build qualified array types.  However,
   the C version of this function does not properly maintain canonical
   types (which are not used in C).  */
tree
c_build_qualified_type (tree type, int type_quals)
{
  return cp_build_qualified_type (type, type_quals);
}


/* Make a variant of TYPE, qualified with the TYPE_QUALS.  Handles
   arrays correctly.  In particular, if TYPE is an array of T's, and
   TYPE_QUALS is non-empty, returns an array of qualified T's.

   FLAGS determines how to deal with ill-formed qualifications. If
   tf_ignore_bad_quals is set, then bad qualifications are dropped
   (this is permitted if TYPE was introduced via a typedef or template
   type parameter). If bad qualifications are dropped and tf_warning
   is set, then a warning is issued for non-const qualifications.  If
   tf_ignore_bad_quals is not set and tf_error is not set, we
   return error_mark_node. Otherwise, we issue an error, and ignore
   the qualifications.

   Qualification of a reference type is valid when the reference came
   via a typedef or template type argument. [dcl.ref] No such
   dispensation is provided for qualifying a function type.  [dcl.fct]
   DR 295 queries this and the proposed resolution brings it into line
   with qualifying a reference.  We implement the DR.  We also behave
   in a similar manner for restricting non-pointer types.  */

tree
cp_build_qualified_type_real (tree type,
			      int type_quals,
			      tsubst_flags_t complain)
{
  tree result;
  int bad_quals = TYPE_UNQUALIFIED;

  if (type == error_mark_node)
    return type;

  if (type_quals == cp_type_quals (type))
    return type;

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* In C++, the qualification really applies to the array element
	 type.  Obtain the appropriately qualified element type.  */
      tree t;
      tree element_type
	= cp_build_qualified_type_real (TREE_TYPE (type),
					type_quals,
					complain);

      if (element_type == error_mark_node)
	return error_mark_node;

      /* See if we already have an identically qualified type.  Tests
	 should be equivalent to those in check_qualified_type.  */
      for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	if (TREE_TYPE (t) == element_type
	    && TYPE_NAME (t) == TYPE_NAME (type)
	    && TYPE_CONTEXT (t) == TYPE_CONTEXT (type)
	    && attribute_list_equal (TYPE_ATTRIBUTES (t),
				     TYPE_ATTRIBUTES (type)))
	  break;

      if (!t)
	{
	  t = build_cplus_array_type (element_type, TYPE_DOMAIN (type));

	  /* Keep the typedef name.  */
	  if (TYPE_NAME (t) != TYPE_NAME (type))
	    {
	      t = build_variant_type_copy (t);
	      TYPE_NAME (t) = TYPE_NAME (type);
	    }
	}

      /* Even if we already had this variant, we update
	 TYPE_NEEDS_CONSTRUCTING and TYPE_HAS_NONTRIVIAL_DESTRUCTOR in case
	 they changed since the variant was originally created.

	 This seems hokey; if there is some way to use a previous
	 variant *without* coming through here,
	 TYPE_NEEDS_CONSTRUCTING will never be updated.  */
      TYPE_NEEDS_CONSTRUCTING (t)
	= TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type));
      TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t)
	= TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type));
      return t;
    }
  else if (TYPE_PTRMEMFUNC_P (type))
    {
      /* For a pointer-to-member type, we can't just return a
	 cv-qualified version of the RECORD_TYPE.  If we do, we
	 haven't changed the field that contains the actual pointer to
	 a method, and so TYPE_PTRMEMFUNC_FN_TYPE will be wrong.  */
      tree t;

      t = TYPE_PTRMEMFUNC_FN_TYPE (type);
      t = cp_build_qualified_type_real (t, type_quals, complain);
      return build_ptrmemfunc_type (t);
    }
  else if (TREE_CODE (type) == TYPE_PACK_EXPANSION)
    {
      tree t = PACK_EXPANSION_PATTERN (type);

      t = cp_build_qualified_type_real (t, type_quals, complain);
      return make_pack_expansion (t);
    }

  /* A reference or method type shall not be cv-qualified.
     [dcl.ref], [dcl.fct].  This used to be an error, but as of DR 295
     (in CD1) we always ignore extra cv-quals on functions.  */
  if (type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE)
      && (TREE_CODE (type) == REFERENCE_TYPE
	  || TREE_CODE (type) == FUNCTION_TYPE
	  || TREE_CODE (type) == METHOD_TYPE))
    {
      if (TREE_CODE (type) == REFERENCE_TYPE)
	bad_quals |= type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
      type_quals &= ~(TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
    }

  /* But preserve any function-cv-quals on a FUNCTION_TYPE.  */
  if (TREE_CODE (type) == FUNCTION_TYPE)
    type_quals |= type_memfn_quals (type);

  /* A restrict-qualified type must be a pointer (or reference)
     to object or incomplete type. */
  if ((type_quals & TYPE_QUAL_RESTRICT)
      && TREE_CODE (type) != TEMPLATE_TYPE_PARM
      && TREE_CODE (type) != TYPENAME_TYPE
      && !POINTER_TYPE_P (type))
    {
      bad_quals |= TYPE_QUAL_RESTRICT;
      type_quals &= ~TYPE_QUAL_RESTRICT;
    }

  if (bad_quals == TYPE_UNQUALIFIED
      || (complain & tf_ignore_bad_quals))
    /*OK*/;
  else if (!(complain & tf_error))
    return error_mark_node;
  else
    {
      tree bad_type = build_qualified_type (ptr_type_node, bad_quals);
      error ("%qV qualifiers cannot be applied to %qT",
	     bad_type, type);
    }

  /* Retrieve (or create) the appropriately qualified variant.  */
  result = build_qualified_type (type, type_quals);

  /* Preserve exception specs and ref-qualifier since build_qualified_type
     doesn't know about them.  */
  if (TREE_CODE (result) == FUNCTION_TYPE
      || TREE_CODE (result) == METHOD_TYPE)
    {
      result = build_exception_variant (result, TYPE_RAISES_EXCEPTIONS (type));
      result = build_ref_qualified_type (result, type_memfn_rqual (type));
    }

  /* If this was a pointer-to-method type, and we just made a copy,
     then we need to unshare the record that holds the cached
     pointer-to-member-function type, because these will be distinct
     between the unqualified and qualified types.  */
  if (result != type
      && TYPE_PTR_P (type)
      && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE
      && TYPE_LANG_SPECIFIC (result) == TYPE_LANG_SPECIFIC (type))
    TYPE_LANG_SPECIFIC (result) = NULL;

  /* We may also have ended up building a new copy of the canonical
     type of a pointer-to-method type, which could have the same
     sharing problem described above.  */
  if (TYPE_CANONICAL (result) != TYPE_CANONICAL (type)
      && TYPE_PTR_P (type)
      && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE
      && (TYPE_LANG_SPECIFIC (TYPE_CANONICAL (result)) 
          == TYPE_LANG_SPECIFIC (TYPE_CANONICAL (type))))
    TYPE_LANG_SPECIFIC (TYPE_CANONICAL (result)) = NULL;

  return result;
}

/* Return TYPE with const and volatile removed.  */

tree
cv_unqualified (tree type)
{
  int quals;

  if (type == error_mark_node)
    return type;

  quals = cp_type_quals (type);
  quals &= ~(TYPE_QUAL_CONST|TYPE_QUAL_VOLATILE);
  return cp_build_qualified_type (type, quals);
}

/* Builds a qualified variant of T that is not a typedef variant.
   E.g. consider the following declarations:
     typedef const int ConstInt;
     typedef ConstInt* PtrConstInt;
   If T is PtrConstInt, this function returns a type representing
     const int*.
   In other words, if T is a typedef, the function returns the underlying type.
   The cv-qualification and attributes of the type returned match the
   input type.
   They will always be compatible types.
   The returned type is built so that all of its subtypes
   recursively have their typedefs stripped as well.

   This is different from just returning TYPE_CANONICAL (T)
   Because of several reasons:
    * If T is a type that needs structural equality
      its TYPE_CANONICAL (T) will be NULL.
    * TYPE_CANONICAL (T) doesn't carry type attributes
      and loses template parameter names.   */

tree
strip_typedefs (tree t)
{
  tree result = NULL, type = NULL, t0 = NULL;

  if (!t || t == error_mark_node || t == TYPE_CANONICAL (t))
    return t;

  gcc_assert (TYPE_P (t));

  switch (TREE_CODE (t))
    {
    case POINTER_TYPE:
      type = strip_typedefs (TREE_TYPE (t));
      result = build_pointer_type (type);
      break;
    case REFERENCE_TYPE:
      type = strip_typedefs (TREE_TYPE (t));
      result = cp_build_reference_type (type, TYPE_REF_IS_RVALUE (t));
      break;
    case OFFSET_TYPE:
      t0 = strip_typedefs (TYPE_OFFSET_BASETYPE (t));
      type = strip_typedefs (TREE_TYPE (t));
      result = build_offset_type (t0, type);
      break;
    case RECORD_TYPE:
      if (TYPE_PTRMEMFUNC_P (t))
	{
	  t0 = strip_typedefs (TYPE_PTRMEMFUNC_FN_TYPE (t));
	  result = build_ptrmemfunc_type (t0);
	}
      break;
    case ARRAY_TYPE:
      type = strip_typedefs (TREE_TYPE (t));
      t0 = strip_typedefs (TYPE_DOMAIN (t));
      result = build_cplus_array_type (type, t0);
      break;
    case FUNCTION_TYPE:
    case METHOD_TYPE:
      {
	tree arg_types = NULL, arg_node, arg_type;
	for (arg_node = TYPE_ARG_TYPES (t);
	     arg_node;
	     arg_node = TREE_CHAIN (arg_node))
	  {
	    if (arg_node == void_list_node)
	      break;
	    arg_type = strip_typedefs (TREE_VALUE (arg_node));
	    gcc_assert (arg_type);

	    arg_types =
	      tree_cons (TREE_PURPOSE (arg_node), arg_type, arg_types);
	  }

	if (arg_types)
	  arg_types = nreverse (arg_types);

	/* A list of parameters not ending with an ellipsis
	   must end with void_list_node.  */
	if (arg_node)
	  arg_types = chainon (arg_types, void_list_node);

	type = strip_typedefs (TREE_TYPE (t));
	if (TREE_CODE (t) == METHOD_TYPE)
	  {
	    tree class_type = TREE_TYPE (TREE_VALUE (arg_types));
	    gcc_assert (class_type);
	    result =
	      build_method_type_directly (class_type, type,
					  TREE_CHAIN (arg_types));
	    result
	      = build_ref_qualified_type (result, type_memfn_rqual (t));
	  }
	else
	  {
	    result = build_function_type (type,
					  arg_types);
	    result = apply_memfn_quals (result,
					type_memfn_quals (t),
					type_memfn_rqual (t));
	  }

	if (TYPE_RAISES_EXCEPTIONS (t))
	  result = build_exception_variant (result,
					    TYPE_RAISES_EXCEPTIONS (t));
      }
      break;
    case TYPENAME_TYPE:
      {
	tree fullname = TYPENAME_TYPE_FULLNAME (t);
	if (TREE_CODE (fullname) == TEMPLATE_ID_EXPR
	    && TREE_OPERAND (fullname, 1))
	  {
	    tree args = TREE_OPERAND (fullname, 1);
	    tree new_args = copy_node (args);
	    bool changed = false;
	    for (int i = 0; i < TREE_VEC_LENGTH (args); ++i)
	      {
		tree arg = TREE_VEC_ELT (args, i);
		tree strip_arg;
		if (TYPE_P (arg))
		  strip_arg = strip_typedefs (arg);
		else
		  strip_arg = strip_typedefs_expr (arg);
		TREE_VEC_ELT (new_args, i) = strip_arg;
		if (strip_arg != arg)
		  changed = true;
	      }
	    if (changed)
	      {
		NON_DEFAULT_TEMPLATE_ARGS_COUNT (new_args)
		  = NON_DEFAULT_TEMPLATE_ARGS_COUNT (args);
		fullname
		  = lookup_template_function (TREE_OPERAND (fullname, 0),
					      new_args);
	      }
	    else
	      ggc_free (new_args);
	  }
	result = make_typename_type (strip_typedefs (TYPE_CONTEXT (t)),
				     fullname, typename_type, tf_none);
      }
      break;
    case DECLTYPE_TYPE:
      result = strip_typedefs_expr (DECLTYPE_TYPE_EXPR (t));
      if (result == DECLTYPE_TYPE_EXPR (t))
	return t;
      else
	result = (finish_decltype_type
		  (result,
		   DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (t),
		   tf_none));
      break;
    default:
      break;
    }

  if (!result)
      result = TYPE_MAIN_VARIANT (t);
  if (TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (result)
      || TYPE_ALIGN (t) != TYPE_ALIGN (result))
    {
      gcc_assert (TYPE_USER_ALIGN (t));
      if (TYPE_ALIGN (t) == TYPE_ALIGN (result))
	result = build_variant_type_copy (result);
      else
	result = build_aligned_type (result, TYPE_ALIGN (t));
      TYPE_USER_ALIGN (result) = true;
    }
  if (TYPE_ATTRIBUTES (t))
    result = cp_build_type_attribute_variant (result, TYPE_ATTRIBUTES (t));
  return cp_build_qualified_type (result, cp_type_quals (t));
}
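
/* Editorial example, not from the original sources: with the typedefs
   from the comment before strip_typedefs (typedef const int ConstInt;
   typedef ConstInt* PtrConstInt;), strip_typedefs applied to
   PtrConstInt rebuilds the type bottom-up through build_pointer_type
   and cp_build_qualified_type, yielding the type const int* with no
   typedef names left in it.  */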

/* Like strip_typedefs above, but works on expressions, so that in

   template<class T> struct A
   {
     typedef T TT;
     B<sizeof(TT)> b;
   };

   sizeof(TT) is replaced by sizeof(T).  */

tree
strip_typedefs_expr (tree t)
{
  unsigned i,n;
  tree r, type, *ops;
  enum tree_code code;

  if (t == NULL_TREE || t == error_mark_node)
    return t;

  if (DECL_P (t) || CONSTANT_CLASS_P (t))
    return t;

  /* Some expressions have type operands, so let's handle types here rather
     than check TYPE_P in multiple places below.  */
  if (TYPE_P (t))
    return strip_typedefs (t);

  code = TREE_CODE (t);
  switch (code)
    {
    case IDENTIFIER_NODE:
    case TEMPLATE_PARM_INDEX:
    case OVERLOAD:
    case BASELINK:
    case ARGUMENT_PACK_SELECT:
      return t;

    case TRAIT_EXPR:
      {
	tree type1 = strip_typedefs (TRAIT_EXPR_TYPE1 (t));
	tree type2 = strip_typedefs (TRAIT_EXPR_TYPE2 (t));
	if (type1 == TRAIT_EXPR_TYPE1 (t)
	    && type2 == TRAIT_EXPR_TYPE2 (t))
	  return t;
	r = copy_node (t);
	TRAIT_EXPR_TYPE1 (r) = type1;
	TRAIT_EXPR_TYPE2 (r) = type2;
	return r;
      }

    case TREE_LIST:
      {
	vec<tree, va_gc> *vec = make_tree_vector ();
	bool changed = false;
	tree it;
	for (it = t; it; it = TREE_CHAIN (it))
	  {
	    tree val = strip_typedefs_expr (TREE_VALUE (t));
	    vec_safe_push (vec, val);
	    if (val != TREE_VALUE (t))
	      changed = true;
	    gcc_assert (TREE_PURPOSE (it) == NULL_TREE);
	  }
	if (changed)
	  {
	    r = NULL_TREE;
	    FOR_EACH_VEC_ELT_REVERSE (*vec, i, it)
	      r = tree_cons (NULL_TREE, it, r);
	  }
	else
	  r = t;
	release_tree_vector (vec);
	return r;
      }

    case TREE_VEC:
      {
	bool changed = false;
	vec<tree, va_gc> *vec = make_tree_vector ();
	n = TREE_VEC_LENGTH (t);
	vec_safe_reserve (vec, n);
	for (i = 0; i < n; ++i)
	  {
	    tree op = strip_typedefs_expr (TREE_VEC_ELT (t, i));
	    vec->quick_push (op);
	    if (op != TREE_VEC_ELT (t, i))
	      changed = true;
	  }
	if (changed)
	  {
	    r = copy_node (t);
	    for (i = 0; i < n; ++i)
	      TREE_VEC_ELT (r, i) = (*vec)[i];
	    NON_DEFAULT_TEMPLATE_ARGS_COUNT (r)
	      = NON_DEFAULT_TEMPLATE_ARGS_COUNT (t);
	  }
	else
	  r = t;
	release_tree_vector (vec);
	return r;
      }

    case CONSTRUCTOR:
      {
	bool changed = false;
	vec<constructor_elt, va_gc> *vec
	  = vec_safe_copy (CONSTRUCTOR_ELTS (t));
	n = CONSTRUCTOR_NELTS (t);
	type = strip_typedefs (TREE_TYPE (t));
	for (i = 0; i < n; ++i)
	  {
	    constructor_elt *e = &(*vec)[i];
	    tree op = strip_typedefs_expr (e->value);
	    if (op != e->value)
	      {
		changed = true;
		e->value = op;
	      }
	    gcc_checking_assert (e->index == strip_typedefs_expr (e->index));
	  }

	if (!changed && type == TREE_TYPE (t))
	  {
	    vec_free (vec);
	    return t;
	  }
	else
	  {
	    r = copy_node (t);
	    TREE_TYPE (r) = type;
	    CONSTRUCTOR_ELTS (r) = vec;
	    return r;
	  }
      }

    case LAMBDA_EXPR:
      error ("lambda-expression in a constant expression");
      return error_mark_node;

    default:
      break;
    }

  gcc_assert (EXPR_P (t));

  n = TREE_OPERAND_LENGTH (t);
  ops = XALLOCAVEC (tree, n);
  type = TREE_TYPE (t);

  switch (code)
    {
    CASE_CONVERT:
    case IMPLICIT_CONV_EXPR:
    case DYNAMIC_CAST_EXPR:
    case STATIC_CAST_EXPR:
    case CONST_CAST_EXPR:
    case REINTERPRET_CAST_EXPR:
    case CAST_EXPR:
    case NEW_EXPR:
      type = strip_typedefs (type);
      /* fallthrough */

    default:
      for (i = 0; i < n; ++i)
	ops[i] = strip_typedefs_expr (TREE_OPERAND (t, i));
      break;
    }

  /* If nothing changed, return t.  */
  for (i = 0; i < n; ++i)
    if (ops[i] != TREE_OPERAND (t, i))
      break;
  if (i == n && type == TREE_TYPE (t))
    return t;

  r = copy_node (t);
  TREE_TYPE (r) = type;
  for (i = 0; i < n; ++i)
    TREE_OPERAND (r, i) = ops[i];
  return r;
}

/* Makes a copy of BINFO and TYPE, which is to be inherited into a
   graph dominated by T.  If BINFO is NULL, TYPE is a dependent base,
   and we do a shallow copy.  If BINFO is non-NULL, we do a deep copy.
   VIRT indicates whether TYPE is inherited virtually or not.
   IGO_PREV points at the previous binfo of the inheritance graph
   order chain.  The newly copied binfo's TREE_CHAIN forms this
   ordering.

   The CLASSTYPE_VBASECLASSES vector of T is constructed in the
   correct order. That is in the order the bases themselves should be
   constructed in.

   The BINFO_INHERITANCE of a virtual base class points to the binfo
   of the most derived type. ??? We could probably change this so that
   BINFO_INHERITANCE becomes synonymous with BINFO_PRIMARY, and hence
   remove a field.  They currently can only differ for primary
   virtual bases.  */

tree
copy_binfo (tree binfo, tree type, tree t, tree *igo_prev, int virt)
{
  tree new_binfo;

  if (virt)
    {
      /* See if we've already made this virtual base.  */
      new_binfo = binfo_for_vbase (type, t);
      if (new_binfo)
	return new_binfo;
    }

  new_binfo = make_tree_binfo (binfo ? BINFO_N_BASE_BINFOS (binfo) : 0);
  BINFO_TYPE (new_binfo) = type;

  /* Chain it into the inheritance graph.  */
  TREE_CHAIN (*igo_prev) = new_binfo;
  *igo_prev = new_binfo;

  if (binfo && !BINFO_DEPENDENT_BASE_P (binfo))
    {
      int ix;
      tree base_binfo;

      gcc_assert (SAME_BINFO_TYPE_P (BINFO_TYPE (binfo), type));

      BINFO_OFFSET (new_binfo) = BINFO_OFFSET (binfo);
      BINFO_VIRTUALS (new_binfo) = BINFO_VIRTUALS (binfo);

      /* We do not need to copy the accesses, as they are read only.  */
      BINFO_BASE_ACCESSES (new_binfo) = BINFO_BASE_ACCESSES (binfo);

      /* Recursively copy base binfos of BINFO.  */
      for (ix = 0; BINFO_BASE_ITERATE (binfo, ix, base_binfo); ix++)
	{
	  tree new_base_binfo;
	  new_base_binfo = copy_binfo (base_binfo, BINFO_TYPE (base_binfo),
				       t, igo_prev,
				       BINFO_VIRTUAL_P (base_binfo));

	  if (!BINFO_INHERITANCE_CHAIN (new_base_binfo))
	    BINFO_INHERITANCE_CHAIN (new_base_binfo) = new_binfo;
	  BINFO_BASE_APPEND (new_binfo, new_base_binfo);
	}
    }
  else
    BINFO_DEPENDENT_BASE_P (new_binfo) = 1;

  if (virt)
    {
      /* Push it onto the list after any virtual bases it contains
	 will have been pushed.  */
      CLASSTYPE_VBASECLASSES (t)->quick_push (new_binfo);
      BINFO_VIRTUAL_P (new_binfo) = 1;
      BINFO_INHERITANCE_CHAIN (new_binfo) = TYPE_BINFO (t);
    }

  return new_binfo;
}

/* Hashing of lists so that we don't make duplicates.
   The entry point is `list_hash_canon'.  */

/* Now here is the hash table.  When recording a list, it is added
   to the slot whose index is the hash code mod the table size.
   Note that the hash table is used for several kinds of lists.
   While all these live in the same table, they are completely independent,
   and the hash code is computed differently for each of these.  */

static GTY ((param_is (union tree_node))) htab_t list_hash_table;

struct list_proxy
{
  tree purpose;
  tree value;
  tree chain;
};

/* Compare ENTRY (an entry in the hash table) with DATA (a list_proxy
   for a node we are thinking about adding).  */

static int
list_hash_eq (const void* entry, const void* data)
{
  const_tree const t = (const_tree) entry;
  const struct list_proxy *const proxy = (const struct list_proxy *) data;

  return (TREE_VALUE (t) == proxy->value
	  && TREE_PURPOSE (t) == proxy->purpose
	  && TREE_CHAIN (t) == proxy->chain);
}

/* Compute a hash code for a list (chain of TREE_LIST nodes
   with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
   TREE_COMMON slots), by adding the hash codes of the individual entries.  */

static hashval_t
list_hash_pieces (tree purpose, tree value, tree chain)
{
  hashval_t hashcode = 0;

  if (chain)
    hashcode += TREE_HASH (chain);

  if (value)
    hashcode += TREE_HASH (value);
  else
    hashcode += 1007;
  if (purpose)
    hashcode += TREE_HASH (purpose);
  else
    hashcode += 1009;
  return hashcode;
}

/* Hash an already existing TREE_LIST.  */

static hashval_t
list_hash (const void* p)
{
  const_tree const t = (const_tree) p;
  return list_hash_pieces (TREE_PURPOSE (t),
			   TREE_VALUE (t),
			   TREE_CHAIN (t));
}

/* Given list components PURPOSE, VALUE, AND CHAIN, return the canonical
   object for an identical list if one already exists.  Otherwise, build a
   new one, and record it as the canonical object.  */

tree
hash_tree_cons (tree purpose, tree value, tree chain)
{
  int hashcode = 0;
  void **slot;
  struct list_proxy proxy;

  /* Hash the list node.  */
  hashcode = list_hash_pieces (purpose, value, chain);
  /* Create a proxy for the TREE_LIST we would like to create.  We
     don't actually create it so as to avoid creating garbage.  */
  proxy.purpose = purpose;
  proxy.value = value;
  proxy.chain = chain;
  /* See if it is already in the table.  */
  slot = htab_find_slot_with_hash (list_hash_table, &proxy, hashcode,
				   INSERT);
  /* If not, create a new node.  */
  if (!*slot)
    *slot = tree_cons (purpose, value, chain);
  return (tree) *slot;
}
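
/* For illustration (a usage sketch, not called from here): because the
   table canonicalizes nodes, building the same PURPOSE/VALUE/CHAIN triple
   twice yields the very same TREE_LIST node, so hashed lists may be
   compared with pointer equality:

     tree l1 = hash_tree_cons (NULL_TREE, value, NULL_TREE);
     tree l2 = hash_tree_cons (NULL_TREE, value, NULL_TREE);
     gcc_assert (l1 == l2);  */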

/* Constructor for hashed lists.  */

tree
hash_tree_chain (tree value, tree chain)
{
  return hash_tree_cons (NULL_TREE, value, chain);
}

void
debug_binfo (tree elem)
{
  HOST_WIDE_INT n;
  tree virtuals;

  fprintf (stderr, "type \"%s\", offset = " HOST_WIDE_INT_PRINT_DEC
	   "\nvtable type:\n",
	   TYPE_NAME_STRING (BINFO_TYPE (elem)),
	   TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
  debug_tree (BINFO_TYPE (elem));
  if (BINFO_VTABLE (elem))
    fprintf (stderr, "vtable decl \"%s\"\n",
	     IDENTIFIER_POINTER (DECL_NAME (get_vtbl_decl_for_binfo (elem))));
  else
    fprintf (stderr, "no vtable decl yet\n");
  fprintf (stderr, "virtuals:\n");
  virtuals = BINFO_VIRTUALS (elem);
  n = 0;

  while (virtuals)
    {
      tree fndecl = TREE_VALUE (virtuals);
      fprintf (stderr, "%s [%ld =? %ld]\n",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
	       (long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
      ++n;
      virtuals = TREE_CHAIN (virtuals);
    }
}

/* Build a representation for the qualified name SCOPE::NAME.  TYPE is
   the type of the result expression, if known, or NULL_TREE if the
   resulting expression is type-dependent.  If TEMPLATE_P is true,
   NAME is known to be a template because the user explicitly used the
   "template" keyword after the "::".

   All SCOPE_REFs should be built by use of this function.  */

tree
build_qualified_name (tree type, tree scope, tree name, bool template_p)
{
  tree t;
  if (type == error_mark_node
      || scope == error_mark_node
      || name == error_mark_node)
    return error_mark_node;
  t = build2 (SCOPE_REF, type, scope, name);
  QUALIFIED_NAME_IS_TEMPLATE (t) = template_p;
  PTRMEM_OK_P (t) = true;
  if (type)
    t = convert_from_reference (t);
  return t;
}
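
/* For illustration: a type-dependent qualified-id such as T::x inside a
   template is built here as a SCOPE_REF with a NULL_TREE type; the
   convert_from_reference call above is only applied once the type is
   known.  */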

/* Like check_qualified_type, but also check ref-qualifier and exception
   specification.  */

static bool
cp_check_qualified_type (const_tree cand, const_tree base, int type_quals,
			 cp_ref_qualifier rqual, tree raises)
{
  return (check_qualified_type (cand, base, type_quals)
	  && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (cand),
				ce_exact)
	  && type_memfn_rqual (cand) == rqual);
}

/* Build the FUNCTION_TYPE or METHOD_TYPE with the ref-qualifier RQUAL.  */

tree
build_ref_qualified_type (tree type, cp_ref_qualifier rqual)
{
  tree t;

  if (rqual == type_memfn_rqual (type))
    return type;

  int type_quals = TYPE_QUALS (type);
  tree raises = TYPE_RAISES_EXCEPTIONS (type);
  for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    if (cp_check_qualified_type (t, type, type_quals, rqual, raises))
      return t;

  t = build_variant_type_copy (type);
  switch (rqual)
    {
    case REF_QUAL_RVALUE:
      FUNCTION_RVALUE_QUALIFIED (t) = 1;
      FUNCTION_REF_QUALIFIED (t) = 1;
      break;
    case REF_QUAL_LVALUE:
      FUNCTION_RVALUE_QUALIFIED (t) = 0;
      FUNCTION_REF_QUALIFIED (t) = 1;
      break;
    default:
      FUNCTION_REF_QUALIFIED (t) = 0;
      break;
    }

  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    /* Propagate structural equality. */
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (type) != type)
    /* Build the underlying canonical type, since it is different
       from TYPE. */
    TYPE_CANONICAL (t) = build_ref_qualified_type (TYPE_CANONICAL (type),
						   rqual);
  else
    /* T is its own canonical type. */
    TYPE_CANONICAL (t) = t;

  return t;
}
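
/* For illustration: a member function declared as

     struct S { void f () &&; };

   has the rvalue ref-qualifier, corresponding to REF_QUAL_RVALUE above,
   which is recorded by setting both FUNCTION_REF_QUALIFIED and
   FUNCTION_RVALUE_QUALIFIED on the new variant type.  */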

/* Returns nonzero if X is an expression for a (possibly overloaded)
   function.  If "f" is a function or function template, "f", "c->f",
   "c.f", "C::f", and "f<int>" will all be considered possibly
   overloaded functions.  Returns 2 if the function is actually
   overloaded, i.e., if it is impossible to know the type of the
   function without performing overload resolution.  */
 
int
is_overloaded_fn (tree x)
{
  /* A baselink is also considered an overloaded function.  */
  if (TREE_CODE (x) == OFFSET_REF
      || TREE_CODE (x) == COMPONENT_REF)
    x = TREE_OPERAND (x, 1);
  if (BASELINK_P (x))
    x = BASELINK_FUNCTIONS (x);
  if (TREE_CODE (x) == TEMPLATE_ID_EXPR)
    x = TREE_OPERAND (x, 0);
  if (DECL_FUNCTION_TEMPLATE_P (OVL_CURRENT (x))
      || (TREE_CODE (x) == OVERLOAD && OVL_CHAIN (x)))
    return 2;
  return  (TREE_CODE (x) == FUNCTION_DECL
	   || TREE_CODE (x) == OVERLOAD);
}

/* X is the CALL_EXPR_FN of a CALL_EXPR.  If X represents a dependent name
   (14.6.2), return the IDENTIFIER_NODE for that name.  Otherwise, return
   NULL_TREE.  */

tree
dependent_name (tree x)
{
  if (identifier_p (x))
    return x;
  if (TREE_CODE (x) != COMPONENT_REF
      && TREE_CODE (x) != OFFSET_REF
      && TREE_CODE (x) != BASELINK
      && is_overloaded_fn (x))
    return DECL_NAME (get_first_fn (x));
  return NULL_TREE;
}

/* Returns true iff X is an expression for an overloaded function
   whose type cannot be known without performing overload
   resolution.  */

bool
really_overloaded_fn (tree x)
{
  return is_overloaded_fn (x) == 2;
}

tree
get_fns (tree from)
{
  gcc_assert (is_overloaded_fn (from));
  /* A baselink is also considered an overloaded function.  */
  if (TREE_CODE (from) == OFFSET_REF
      || TREE_CODE (from) == COMPONENT_REF)
    from = TREE_OPERAND (from, 1);
  if (BASELINK_P (from))
    from = BASELINK_FUNCTIONS (from);
  if (TREE_CODE (from) == TEMPLATE_ID_EXPR)
    from = TREE_OPERAND (from, 0);
  return from;
}

tree
get_first_fn (tree from)
{
  return OVL_CURRENT (get_fns (from));
}

/* Return a new OVL node, concatenating it with the old one.  */

tree
ovl_cons (tree decl, tree chain)
{
  tree result = make_node (OVERLOAD);
  TREE_TYPE (result) = unknown_type_node;
  OVL_FUNCTION (result) = decl;
  TREE_CHAIN (result) = chain;

  return result;
}

/* Build a new overloaded function. If this is the first one,
   just return it; otherwise, ovl_cons the _DECLs.  */

tree
build_overload (tree decl, tree chain)
{
  if (! chain && TREE_CODE (decl) != TEMPLATE_DECL)
    return decl;
  return ovl_cons (decl, chain);
}

/* Return the scope where the overloaded functions OVL were found.  */

tree
ovl_scope (tree ovl)
{
  if (TREE_CODE (ovl) == OFFSET_REF
      || TREE_CODE (ovl) == COMPONENT_REF)
    ovl = TREE_OPERAND (ovl, 1);
  if (TREE_CODE (ovl) == BASELINK)
    return BINFO_TYPE (BASELINK_BINFO (ovl));
  if (TREE_CODE (ovl) == TEMPLATE_ID_EXPR)
    ovl = TREE_OPERAND (ovl, 0);
  /* Skip using-declarations.  */
  while (TREE_CODE (ovl) == OVERLOAD && OVL_USED (ovl) && OVL_CHAIN (ovl))
    ovl = OVL_CHAIN (ovl);
  return CP_DECL_CONTEXT (OVL_CURRENT (ovl));
}

/* Return TRUE if FN is a non-static member function, FALSE otherwise.
   This function looks into BASELINK and OVERLOAD nodes.  */

bool
non_static_member_function_p (tree fn)
{
  if (fn == NULL_TREE)
    return false;

  if (is_overloaded_fn (fn))
    fn = get_first_fn (fn);

  return (DECL_P (fn)
	  && DECL_NONSTATIC_MEMBER_FUNCTION_P (fn));
}


#define PRINT_RING_SIZE 4

static const char *
cxx_printable_name_internal (tree decl, int v, bool translate)
{
  static unsigned int uid_ring[PRINT_RING_SIZE];
  static char *print_ring[PRINT_RING_SIZE];
  static bool trans_ring[PRINT_RING_SIZE];
  static int ring_counter;
  int i;

  /* Only cache functions.  */
  if (v < 2
      || TREE_CODE (decl) != FUNCTION_DECL
      || DECL_LANG_SPECIFIC (decl) == 0)
    return lang_decl_name (decl, v, translate);

  /* See if this print name is lying around.  */
  for (i = 0; i < PRINT_RING_SIZE; i++)
    if (uid_ring[i] == DECL_UID (decl) && translate == trans_ring[i])
      /* yes, so return it.  */
      return print_ring[i];

  if (++ring_counter == PRINT_RING_SIZE)
    ring_counter = 0;

  if (current_function_decl != NULL_TREE)
    {
      /* There may be both translated and untranslated versions of the
	 name cached.  */
      for (i = 0; i < 2; i++)
	{
	  if (uid_ring[ring_counter] == DECL_UID (current_function_decl))
	    ring_counter += 1;
	  if (ring_counter == PRINT_RING_SIZE)
	    ring_counter = 0;
	}
      gcc_assert (uid_ring[ring_counter] != DECL_UID (current_function_decl));
    }

  free (print_ring[ring_counter]);

  print_ring[ring_counter] = xstrdup (lang_decl_name (decl, v, translate));
  uid_ring[ring_counter] = DECL_UID (decl);
  trans_ring[ring_counter] = translate;
  return print_ring[ring_counter];
}

const char *
cxx_printable_name (tree decl, int v)
{
  return cxx_printable_name_internal (decl, v, false);
}

const char *
cxx_printable_name_translate (tree decl, int v)
{
  return cxx_printable_name_internal (decl, v, true);
}

/* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
   listed in RAISES.  */

tree
build_exception_variant (tree type, tree raises)
{
  tree v;
  int type_quals;

  if (comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (type), ce_exact))
    return type;

  type_quals = TYPE_QUALS (type);
  cp_ref_qualifier rqual = type_memfn_rqual (type);
  for (v = TYPE_MAIN_VARIANT (type); v; v = TYPE_NEXT_VARIANT (v))
    if (cp_check_qualified_type (v, type, type_quals, rqual, raises))
      return v;

  /* Need to build a new variant.  */
  v = build_variant_type_copy (type);
  TYPE_RAISES_EXCEPTIONS (v) = raises;
  return v;
}

/* Given a TEMPLATE_TEMPLATE_PARM node T, create a new
   BOUND_TEMPLATE_TEMPLATE_PARM bound with NEWARGS as its template
   arguments.  */

tree
bind_template_template_parm (tree t, tree newargs)
{
  tree decl = TYPE_NAME (t);
  tree t2;

  t2 = cxx_make_type (BOUND_TEMPLATE_TEMPLATE_PARM);
  decl = build_decl (input_location,
		     TYPE_DECL, DECL_NAME (decl), NULL_TREE);

  /* These nodes have to be created to reflect new TYPE_DECL and template
     arguments.  */
  TEMPLATE_TYPE_PARM_INDEX (t2) = copy_node (TEMPLATE_TYPE_PARM_INDEX (t));
  TEMPLATE_PARM_DECL (TEMPLATE_TYPE_PARM_INDEX (t2)) = decl;
  TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t2)
    = build_template_info (TEMPLATE_TEMPLATE_PARM_TEMPLATE_DECL (t), newargs);

  TREE_TYPE (decl) = t2;
  TYPE_NAME (t2) = decl;
  TYPE_STUB_DECL (t2) = decl;
  TYPE_SIZE (t2) = 0;
  SET_TYPE_STRUCTURAL_EQUALITY (t2);

  return t2;
}

/* Called from count_trees via walk_tree.  */

static tree
count_trees_r (tree *tp, int *walk_subtrees, void *data)
{
  ++*((int *) data);

  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Debugging function for measuring the rough complexity of a tree
   representation.  */

int
count_trees (tree t)
{
  int n_trees = 0;
  cp_walk_tree_without_duplicates (&t, count_trees_r, &n_trees);
  return n_trees;
}

/* Called from verify_stmt_tree via walk_tree.  */

static tree
verify_stmt_tree_r (tree* tp, int * /*walk_subtrees*/, void* data)
{
  tree t = *tp;
  hash_table <pointer_hash <tree_node> > *statements
      = static_cast <hash_table <pointer_hash <tree_node> > *> (data);
  tree_node **slot;

  if (!STATEMENT_CODE_P (TREE_CODE (t)))
    return NULL_TREE;

  /* If this statement is already present in the hash table, then
     there is a circularity in the statement tree.  */
  gcc_assert (!statements->find (t));

  slot = statements->find_slot (t, INSERT);
  *slot = t;

  return NULL_TREE;
}

/* Debugging function to check that the statement T has not been
   corrupted.  For now, this function simply checks that T contains no
   circularities.  */

void
verify_stmt_tree (tree t)
{
  hash_table <pointer_hash <tree_node> > statements;
  statements.create (37);
  cp_walk_tree (&t, verify_stmt_tree_r, &statements, NULL);
  statements.dispose ();
}

/* Check if the type T depends on a type with no linkage and if so, return
   it.  If RELAXED_P then do not consider a class type declared within
   a vague-linkage function to have no linkage.  */

tree
no_linkage_check (tree t, bool relaxed_p)
{
  tree r;

  /* There's no point in checking linkage on template functions; we
     can't know their complete types.  */
  if (processing_template_decl)
    return NULL_TREE;

  switch (TREE_CODE (t))
    {
    case RECORD_TYPE:
      if (TYPE_PTRMEMFUNC_P (t))
	goto ptrmem;
      /* Lambda types that don't have mangling scope have no linkage.  We
	 check CLASSTYPE_LAMBDA_EXPR for error_mark_node because
	 when we get here from pushtag none of the lambda information is
	 set up yet, so we want to assume that the lambda has linkage and
	 fix it up later if not.  */
      if (CLASSTYPE_LAMBDA_EXPR (t)
	  && CLASSTYPE_LAMBDA_EXPR (t) != error_mark_node
	  && LAMBDA_TYPE_EXTRA_SCOPE (t) == NULL_TREE)
	return t;
      /* Fall through.  */
    case UNION_TYPE:
      if (!CLASS_TYPE_P (t))
	return NULL_TREE;
      /* Fall through.  */
    case ENUMERAL_TYPE:
      /* Only treat anonymous types as having no linkage if they're at
	 namespace scope.  This is core issue 966.  */
      if (TYPE_ANONYMOUS_P (t) && TYPE_NAMESPACE_SCOPE_P (t))
	return t;

      for (r = CP_TYPE_CONTEXT (t); ; )
	{
	  /* If we're a nested type of a !TREE_PUBLIC class, we might not
	     have linkage, or we might just be in an anonymous namespace.
	     If we're in a TREE_PUBLIC class, we have linkage.  */
	  if (TYPE_P (r) && !TREE_PUBLIC (TYPE_NAME (r)))
	    return no_linkage_check (TYPE_CONTEXT (t), relaxed_p);
	  else if (TREE_CODE (r) == FUNCTION_DECL)
	    {
	      if (!relaxed_p || !vague_linkage_p (r))
		return t;
	      else
		r = CP_DECL_CONTEXT (r);
	    }
	  else
	    break;
	}

      return NULL_TREE;

    case ARRAY_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      return no_linkage_check (TREE_TYPE (t), relaxed_p);

    case OFFSET_TYPE:
    ptrmem:
      r = no_linkage_check (TYPE_PTRMEM_POINTED_TO_TYPE (t),
			    relaxed_p);
      if (r)
	return r;
      return no_linkage_check (TYPE_PTRMEM_CLASS_TYPE (t), relaxed_p);

    case METHOD_TYPE:
      r = no_linkage_check (TYPE_METHOD_BASETYPE (t), relaxed_p);
      if (r)
	return r;
      /* Fall through.  */
    case FUNCTION_TYPE:
      {
	tree parm;
	for (parm = TYPE_ARG_TYPES (t);
	     parm && parm != void_list_node;
	     parm = TREE_CHAIN (parm))
	  {
	    r = no_linkage_check (TREE_VALUE (parm), relaxed_p);
	    if (r)
	      return r;
	  }
	return no_linkage_check (TREE_TYPE (t), relaxed_p);
      }

    default:
      return NULL_TREE;
    }
}

extern int depth_reached;

void
cxx_print_statistics (void)
{
  print_search_statistics ();
  print_class_statistics ();
  print_template_statistics ();
  if (GATHER_STATISTICS)
    fprintf (stderr, "maximum template instantiation depth reached: %d\n",
	     depth_reached);
}

/* Return, as an INTEGER_CST node, the number of elements for TYPE
   (which is an ARRAY_TYPE).  This counts only elements of the top
   array.  */

tree
array_type_nelts_top (tree type)
{
  return fold_build2_loc (input_location,
		      PLUS_EXPR, sizetype,
		      array_type_nelts (type),
		      size_one_node);
}

/* Return, as an INTEGER_CST node, the number of elements for TYPE
   (which is an ARRAY_TYPE).  This one is a recursive count of all
   ARRAY_TYPEs that are clumped together.  */

tree
array_type_nelts_total (tree type)
{
  tree sz = array_type_nelts_top (type);
  type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree n = array_type_nelts_top (type);
      sz = fold_build2_loc (input_location,
			MULT_EXPR, sizetype, sz, n);
      type = TREE_TYPE (type);
    }
  return sz;
}
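
/* For illustration: for the type int[2][3], array_type_nelts_top yields
   the top dimension only (2), while array_type_nelts_total yields the
   product over all nested ARRAY_TYPEs (2 * 3 = 6).  */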

/* Called from break_out_target_exprs via mapcar.  */

static tree
bot_manip (tree* tp, int* walk_subtrees, void* data)
{
  splay_tree target_remap = ((splay_tree) data);
  tree t = *tp;

  if (!TYPE_P (t) && TREE_CONSTANT (t) && !TREE_SIDE_EFFECTS (t))
    {
      /* There can't be any TARGET_EXPRs or their slot variables below this
	 point.  But we must make a copy, in case subsequent processing
	 alters any part of it.  For example, during gimplification a cast
	 of the form (T) &X::f (where "f" is a member function) will lead
	 to replacing the PTRMEM_CST for &X::f with a VAR_DECL.  */
      *walk_subtrees = 0;
      *tp = unshare_expr (t);
      return NULL_TREE;
    }
  if (TREE_CODE (t) == TARGET_EXPR)
    {
      tree u;

      if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR)
	{
	  u = build_cplus_new (TREE_TYPE (t), TREE_OPERAND (t, 1),
			       tf_warning_or_error);
	  if (AGGR_INIT_ZERO_FIRST (TREE_OPERAND (t, 1)))
	    AGGR_INIT_ZERO_FIRST (TREE_OPERAND (u, 1)) = true;
	}
      else
	u = build_target_expr_with_type (TREE_OPERAND (t, 1), TREE_TYPE (t),
					 tf_warning_or_error);

      TARGET_EXPR_IMPLICIT_P (u) = TARGET_EXPR_IMPLICIT_P (t);
      TARGET_EXPR_LIST_INIT_P (u) = TARGET_EXPR_LIST_INIT_P (t);
      TARGET_EXPR_DIRECT_INIT_P (u) = TARGET_EXPR_DIRECT_INIT_P (t);

      /* Map the old variable to the new one.  */
      splay_tree_insert (target_remap,
			 (splay_tree_key) TREE_OPERAND (t, 0),
			 (splay_tree_value) TREE_OPERAND (u, 0));

      TREE_OPERAND (u, 1) = break_out_target_exprs (TREE_OPERAND (u, 1));

      /* Replace the old expression with the new version.  */
      *tp = u;
      /* We don't have to go below this point; the recursive call to
	 break_out_target_exprs will have handled anything below this
	 point.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Make a copy of this node.  */
  t = copy_tree_r (tp, walk_subtrees, NULL);
  if (TREE_CODE (*tp) == CALL_EXPR)
    {
      set_flags_from_callee (*tp);

      /* builtin_LINE and builtin_FILE get the location where the default
	 argument is expanded, not where the call was written.  */
      tree callee = get_callee_fndecl (*tp);
      if (callee && DECL_BUILT_IN (callee))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FILE:
	  case BUILT_IN_LINE:
	    SET_EXPR_LOCATION (*tp, input_location);
	  }
    }
  return t;
}

/* Replace all remapped VAR_DECLs in T with their new equivalents.
   DATA is really a splay-tree mapping old variables to new
   variables.  */

static tree
bot_replace (tree* t, int* /*walk_subtrees*/, void* data)
{
  splay_tree target_remap = ((splay_tree) data);

  if (VAR_P (*t))
    {
      splay_tree_node n = splay_tree_lookup (target_remap,
					     (splay_tree_key) *t);
      if (n)
	*t = (tree) n->value;
    }
  else if (TREE_CODE (*t) == PARM_DECL
	   && DECL_NAME (*t) == this_identifier)
    {
      /* In an NSDMI we need to replace the 'this' parameter we used for
	 parsing with the real one for this function.  */
      *t = current_class_ptr;
    }
  else if (TREE_CODE (*t) == CONVERT_EXPR
	   && CONVERT_EXPR_VBASE_PATH (*t))
    {
      /* In an NSDMI build_base_path defers building conversions to virtual
	 bases, and we handle it here.  */
      tree basetype = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (*t)));
      vec<tree, va_gc> *vbases = CLASSTYPE_VBASECLASSES (current_class_type);
      int i; tree binfo;
      FOR_EACH_VEC_SAFE_ELT (vbases, i, binfo)
	if (BINFO_TYPE (binfo) == basetype)
	  break;
      *t = build_base_path (PLUS_EXPR, TREE_OPERAND (*t, 0), binfo, true,
			    tf_warning_or_error);
    }

  return NULL_TREE;
}

/* When we parse a default argument expression, we may create
   temporary variables via TARGET_EXPRs.  When we actually use the
   default-argument expression, we make a copy of the expression
   and replace the temporaries with appropriate local versions.  */

tree
break_out_target_exprs (tree t)
{
  static int target_remap_count;
  static splay_tree target_remap;

  if (!target_remap_count++)
    target_remap = splay_tree_new (splay_tree_compare_pointers,
				   /*splay_tree_delete_key_fn=*/NULL,
				   /*splay_tree_delete_value_fn=*/NULL);
  cp_walk_tree (&t, bot_manip, target_remap, NULL);
  cp_walk_tree (&t, bot_replace, target_remap, NULL);

  if (!--target_remap_count)
    {
      splay_tree_delete (target_remap);
      target_remap = NULL;
    }

  return t;
}
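
/* For illustration (X is a placeholder class with a suitable constructor):
   for a default argument that creates a temporary, e.g.

     void f (const X &x = X (42));
     ...
     f ();

   the parsed default argument contains a TARGET_EXPR for the temporary.
   Each use of the default argument gets its own copy of the expression
   through break_out_target_exprs, with the TARGET_EXPR slot variable
   remapped by bot_manip/bot_replace so that distinct calls do not share
   a temporary.  */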

/* Similar to `build_nt', but for template definitions of dependent
   expressions  */

tree
build_min_nt_loc (location_t loc, enum tree_code code, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  va_start (p, code);

  t = make_node (code);
  SET_EXPR_LOCATION (t, loc);
  length = TREE_CODE_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
    }

  va_end (p);
  return t;
}


/* Similar to `build', but for template definitions.  */

tree
build_min (enum tree_code code, tree tt, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  va_start (p, tt);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);
  TREE_TYPE (t) = tt;

  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
      if (x && !TYPE_P (x) && TREE_SIDE_EFFECTS (x))
	TREE_SIDE_EFFECTS (t) = 1;
    }

  va_end (p);
  return t;
}

/* Similar to `build', but for template definitions of non-dependent
   expressions. NON_DEP is the non-dependent expression that has been
   built.  */

tree
build_min_non_dep (enum tree_code code, tree non_dep, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  va_start (p, non_dep);

  if (REFERENCE_REF_P (non_dep))
    non_dep = TREE_OPERAND (non_dep, 0);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);
  TREE_TYPE (t) = TREE_TYPE (non_dep);
  TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (non_dep);

  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
    }

  if (code == COMPOUND_EXPR && TREE_CODE (non_dep) != COMPOUND_EXPR)
    /* This should not be considered a COMPOUND_EXPR, because it
       resolves to an overload.  */
    COMPOUND_EXPR_OVERLOADED (t) = 1;

  va_end (p);
  return convert_from_reference (t);
}

/* Similar to `build_nt_call_vec', but for template definitions of
   non-dependent expressions. NON_DEP is the non-dependent expression
   that has been built.  */

tree
build_min_non_dep_call_vec (tree non_dep, tree fn, vec<tree, va_gc> *argvec)
{
  tree t = build_nt_call_vec (fn, argvec);
  if (REFERENCE_REF_P (non_dep))
    non_dep = TREE_OPERAND (non_dep, 0);
  TREE_TYPE (t) = TREE_TYPE (non_dep);
  TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (non_dep);
  return convert_from_reference (t);
}

tree
get_type_decl (tree t)
{
  if (TREE_CODE (t) == TYPE_DECL)
    return t;
  if (TYPE_P (t))
    return TYPE_STUB_DECL (t);
  gcc_assert (t == error_mark_node);
  return t;
}

/* Returns the namespace that contains DECL, whether directly or
   indirectly.  */

tree
decl_namespace_context (tree decl)
{
  while (1)
    {
      if (TREE_CODE (decl) == NAMESPACE_DECL)
	return decl;
      else if (TYPE_P (decl))
	decl = CP_DECL_CONTEXT (TYPE_MAIN_DECL (decl));
      else
	decl = CP_DECL_CONTEXT (decl);
    }
}

/* Returns true if decl is within an anonymous namespace, however deeply
   nested, or false otherwise.  */

bool
decl_anon_ns_mem_p (const_tree decl)
{
  while (1)
    {
      if (decl == NULL_TREE || decl == error_mark_node)
	return false;
      if (TREE_CODE (decl) == NAMESPACE_DECL
	  && DECL_NAME (decl) == NULL_TREE)
	return true;
      /* Classes and namespaces inside anonymous namespaces have
         TREE_PUBLIC == 0, so we can shortcut the search.  */
      else if (TYPE_P (decl))
	return (TREE_PUBLIC (TYPE_MAIN_DECL (decl)) == 0);
      else if (TREE_CODE (decl) == NAMESPACE_DECL)
	return (TREE_PUBLIC (decl) == 0);
      else
	decl = DECL_CONTEXT (decl);
    }
}

/* Subroutine of cp_tree_equal: t1 and t2 are the CALL_EXPR_FNs of two
   CALL_EXPRS.  Return whether they are equivalent.  */

static bool
called_fns_equal (tree t1, tree t2)
{
  /* Core 1321: dependent names are equivalent even if the overload sets
     are different.  But do compare explicit template arguments.  */
  tree name1 = dependent_name (t1);
  tree name2 = dependent_name (t2);
  if (name1 || name2)
    {
      tree targs1 = NULL_TREE, targs2 = NULL_TREE;

      if (name1 != name2)
	return false;

      if (TREE_CODE (t1) == TEMPLATE_ID_EXPR)
	targs1 = TREE_OPERAND (t1, 1);
      if (TREE_CODE (t2) == TEMPLATE_ID_EXPR)
	targs2 = TREE_OPERAND (t2, 1);
      return cp_tree_equal (targs1, targs2);
    }
  else
    return cp_tree_equal (t1, t2);
}

/* Return truthvalue of whether T1 is the same tree structure as T2.
   Return 1 if they are the same. Return 0 if they are different.  */

bool
cp_tree_equal (tree t1, tree t2)
{
  enum tree_code code1, code2;

  if (t1 == t2)
    return true;
  if (!t1 || !t2)
    return false;

  for (code1 = TREE_CODE (t1);
       CONVERT_EXPR_CODE_P (code1)
	 || code1 == NON_LVALUE_EXPR;
       code1 = TREE_CODE (t1))
    t1 = TREE_OPERAND (t1, 0);
  for (code2 = TREE_CODE (t2);
       CONVERT_EXPR_CODE_P (code2)
	 || code2 == NON_LVALUE_EXPR;
       code2 = TREE_CODE (t2))
    t2 = TREE_OPERAND (t2, 0);

  /* They might have become equal now.  */
  if (t1 == t2)
    return true;

  if (code1 != code2)
    return false;

  switch (code1)
    {
    case INTEGER_CST:
      return TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
	&& TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2);

    case REAL_CST:
      return REAL_VALUES_EQUAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));

    case STRING_CST:
      return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	&& !memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
		    TREE_STRING_LENGTH (t1));

    case FIXED_CST:
      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1),
				     TREE_FIXED_CST (t2));

    case COMPLEX_CST:
      return cp_tree_equal (TREE_REALPART (t1), TREE_REALPART (t2))
	&& cp_tree_equal (TREE_IMAGPART (t1), TREE_IMAGPART (t2));

    case VECTOR_CST:
      return operand_equal_p (t1, t2, OEP_ONLY_CONST);

    case CONSTRUCTOR:
      /* We need to do this when determining whether or not two
	 non-type pointer to member function template arguments
	 are the same.  */
      if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))
	  || CONSTRUCTOR_NELTS (t1) != CONSTRUCTOR_NELTS (t2))
	return false;
      {
	tree field, value;
	unsigned int i;
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, field, value)
	  {
	    constructor_elt *elt2 = CONSTRUCTOR_ELT (t2, i);
	    if (!cp_tree_equal (field, elt2->index)
		|| !cp_tree_equal (value, elt2->value))
	      return false;
	  }
      }
      return true;

    case TREE_LIST:
      if (!cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2)))
	return false;
      if (!cp_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2)))
	return false;
      return cp_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2));

    case SAVE_EXPR:
      return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    case CALL_EXPR:
      {
	tree arg1, arg2;
	call_expr_arg_iterator iter1, iter2;
	if (!called_fns_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2)))
	  return false;
	for (arg1 = first_call_expr_arg (t1, &iter1),
	       arg2 = first_call_expr_arg (t2, &iter2);
	     arg1 && arg2;
	     arg1 = next_call_expr_arg (&iter1),
	       arg2 = next_call_expr_arg (&iter2))
	  if (!cp_tree_equal (arg1, arg2))
	    return false;
	if (arg1 || arg2)
	  return false;
	return true;
      }

    case TARGET_EXPR:
      {
	tree o1 = TREE_OPERAND (t1, 0);
	tree o2 = TREE_OPERAND (t2, 0);

	/* Special case: if either target is an unallocated VAR_DECL,
	   it means that it's going to be unified with whatever the
	   TARGET_EXPR is really supposed to initialize, so treat it
	   as being equivalent to anything.  */
	if (VAR_P (o1) && DECL_NAME (o1) == NULL_TREE
	    && !DECL_RTL_SET_P (o1))
	  /*Nop*/;
	else if (VAR_P (o2) && DECL_NAME (o2) == NULL_TREE
		 && !DECL_RTL_SET_P (o2))
	  /*Nop*/;
	else if (!cp_tree_equal (o1, o2))
	  return false;

	return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
      }

    case WITH_CLEANUP_EXPR:
      if (!cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)))
	return false;
      return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));

    case COMPONENT_REF:
      if (TREE_OPERAND (t1, 1) != TREE_OPERAND (t2, 1))
	return false;
      return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    case PARM_DECL:
      /* This case arises when comparing uses of parameters in late-specified
	 return types with an out-of-class definition of the function, but it
	 can also come up for expressions that involve 'this' in a member
	 function template.  */

      if (comparing_specializations)
	/* When comparing hash table entries, only an exact match is
	   good enough; we don't want to replace 'this' with the
	   version from another function.  */
	return false;

      if (same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)))
	{
	  if (DECL_ARTIFICIAL (t1) ^ DECL_ARTIFICIAL (t2))
	    return false;
	  if (DECL_ARTIFICIAL (t1)
	      || (DECL_PARM_LEVEL (t1) == DECL_PARM_LEVEL (t2)
		  && DECL_PARM_INDEX (t1) == DECL_PARM_INDEX (t2)))
	    return true;
	}
      return false;

    case VAR_DECL:
    case CONST_DECL:
    case FIELD_DECL:
    case FUNCTION_DECL:
    case TEMPLATE_DECL:
    case IDENTIFIER_NODE:
    case SSA_NAME:
      return false;

    case BASELINK:
      return (BASELINK_BINFO (t1) == BASELINK_BINFO (t2)
	      && BASELINK_ACCESS_BINFO (t1) == BASELINK_ACCESS_BINFO (t2)
	      && BASELINK_QUALIFIED_P (t1) == BASELINK_QUALIFIED_P (t2)
	      && cp_tree_equal (BASELINK_FUNCTIONS (t1),
				BASELINK_FUNCTIONS (t2)));

    case TEMPLATE_PARM_INDEX:
      return (TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2)
	      && TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2)
	      && (TEMPLATE_PARM_PARAMETER_PACK (t1)
		  == TEMPLATE_PARM_PARAMETER_PACK (t2))
	      && same_type_p (TREE_TYPE (TEMPLATE_PARM_DECL (t1)),
			      TREE_TYPE (TEMPLATE_PARM_DECL (t2))));

    case TEMPLATE_ID_EXPR:
      return (cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
	      && cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));

    case TREE_VEC:
      {
	unsigned ix;
	if (TREE_VEC_LENGTH (t1) != TREE_VEC_LENGTH (t2))
	  return false;
	for (ix = TREE_VEC_LENGTH (t1); ix--;)
	  if (!cp_tree_equal (TREE_VEC_ELT (t1, ix),
			      TREE_VEC_ELT (t2, ix)))
	    return false;
	return true;
      }

    case SIZEOF_EXPR:
    case ALIGNOF_EXPR:
      {
	tree o1 = TREE_OPERAND (t1, 0);
	tree o2 = TREE_OPERAND (t2, 0);

	if (code1 == SIZEOF_EXPR)
	  {
	    if (SIZEOF_EXPR_TYPE_P (t1))
	      o1 = TREE_TYPE (o1);
	    if (SIZEOF_EXPR_TYPE_P (t2))
	      o2 = TREE_TYPE (o2);
	  }
	if (TREE_CODE (o1) != TREE_CODE (o2))
	  return false;
	if (TYPE_P (o1))
	  return same_type_p (o1, o2);
	else
	  return cp_tree_equal (o1, o2);
      }

    case MODOP_EXPR:
      {
	tree t1_op1, t2_op1;

	if (!cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)))
	  return false;

	t1_op1 = TREE_OPERAND (t1, 1);
	t2_op1 = TREE_OPERAND (t2, 1);
	if (TREE_CODE (t1_op1) != TREE_CODE (t2_op1))
	  return false;

	return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t2, 2));
      }

    case PTRMEM_CST:
      /* Two pointer-to-members are the same if they point to the same
	 field or function in the same class.  */
      if (PTRMEM_CST_MEMBER (t1) != PTRMEM_CST_MEMBER (t2))
	return false;

      return same_type_p (PTRMEM_CST_CLASS (t1), PTRMEM_CST_CLASS (t2));

    case OVERLOAD:
      if (OVL_FUNCTION (t1) != OVL_FUNCTION (t2))
	return false;
      return cp_tree_equal (OVL_CHAIN (t1), OVL_CHAIN (t2));

    case TRAIT_EXPR:
      if (TRAIT_EXPR_KIND (t1) != TRAIT_EXPR_KIND (t2))
	return false;
      return same_type_p (TRAIT_EXPR_TYPE1 (t1), TRAIT_EXPR_TYPE1 (t2))
	&& same_type_p (TRAIT_EXPR_TYPE2 (t1), TRAIT_EXPR_TYPE2 (t2));

    case CAST_EXPR:
    case STATIC_CAST_EXPR:
    case REINTERPRET_CAST_EXPR:
    case CONST_CAST_EXPR:
    case DYNAMIC_CAST_EXPR:
    case IMPLICIT_CONV_EXPR:
    case NEW_EXPR:
      if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)))
	return false;
      /* Now compare operands as usual.  */
      break;

    case DEFERRED_NOEXCEPT:
      return (cp_tree_equal (DEFERRED_NOEXCEPT_PATTERN (t1),
			     DEFERRED_NOEXCEPT_PATTERN (t2))
	      && comp_template_args (DEFERRED_NOEXCEPT_ARGS (t1),
				     DEFERRED_NOEXCEPT_ARGS (t2)));
      break;

    default:
      break;
    }

  switch (TREE_CODE_CLASS (code1))
    {
    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_vl_exp:
    case tcc_reference:
    case tcc_statement:
      {
	int i, n;

	n = cp_tree_operand_length (t1);
	if (TREE_CODE_CLASS (code1) == tcc_vl_exp
	    && n != TREE_OPERAND_LENGTH (t2))
	  return false;

	for (i = 0; i < n; ++i)
	  if (!cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i)))
	    return false;

	return true;
      }

    case tcc_type:
      return same_type_p (t1, t2);
    default:
      gcc_unreachable ();
    }
  /* We can get here with --disable-checking.  */
  return false;
}
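
/* Note that cp_tree_equal tests structural equality, not identity: two
   distinct INTEGER_CST nodes with the same value compare equal, while
   most _DECL nodes (VAR_DECL, FUNCTION_DECL, etc.) compare equal only
   when they are literally the same node, via the t1 == t2 check at the
   top.  */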

/* The type of ARG when used as an lvalue.  */

tree
lvalue_type (tree arg)
{
  tree type = TREE_TYPE (arg);
  return type;
}

/* The type of ARG for printing error messages; denote lvalues with
   reference types.  */

tree
error_type (tree arg)
{
  tree type = TREE_TYPE (arg);

  if (TREE_CODE (type) == ARRAY_TYPE)
    ;
  else if (TREE_CODE (type) == ERROR_MARK)
    ;
  else if (real_lvalue_p (arg))
    type = build_reference_type (lvalue_type (arg));
  else if (MAYBE_CLASS_TYPE_P (type))
    type = lvalue_type (arg);

  return type;
}

/* Does FUNCTION use a variable-length argument list?  */

int
varargs_function_p (const_tree function)
{
  return stdarg_p (TREE_TYPE (function));
}

/* Returns 1 if decl is a member of a class.  */

int
member_p (const_tree decl)
{
  const_tree const ctx = DECL_CONTEXT (decl);
  return (ctx && TYPE_P (ctx));
}

/* Create a placeholder for member access where we don't actually have an
   object that the access is against.  */

tree
build_dummy_object (tree type)
{
  tree decl = build1 (NOP_EXPR, build_pointer_type (type), void_zero_node);
  return cp_build_indirect_ref (decl, RO_NULL, tf_warning_or_error);
}

/* We've gotten a reference to a member of TYPE.  Return *this if appropriate,
   or a dummy object otherwise.  If BINFOP is non-0, it is filled with the
   binfo path from current_class_type to TYPE, or 0.  */

tree
maybe_dummy_object (tree type, tree* binfop)
{
  tree decl, context;
  tree binfo;
  tree current = current_nonlambda_class_type ();

  if (current
      && (binfo = lookup_base (current, type, ba_any, NULL,
			       tf_warning_or_error)))
    context = current;
  else
    {
      /* Reference from a nested class member function.  */
      context = type;
      binfo = TYPE_BINFO (type);
    }

  if (binfop)
    *binfop = binfo;

  if (current_class_ref
      /* current_class_ref might not correspond to current_class_type if
	 we're in tsubst_default_argument or a lambda-declarator; in either
	 case, we want to use current_class_ref if it matches CONTEXT.  */
      && (same_type_ignoring_top_level_qualifiers_p
	  (TREE_TYPE (current_class_ref), context)))
    decl = current_class_ref;
  else
    decl = build_dummy_object (context);

  return decl;
}

/* Returns 1 if OB is a placeholder object, or a pointer to one.  */

int
is_dummy_object (const_tree ob)
{
  if (INDIRECT_REF_P (ob))
    ob = TREE_OPERAND (ob, 0);
  return (TREE_CODE (ob) == NOP_EXPR
	  && TREE_OPERAND (ob, 0) == void_zero_node);
}
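
/* For illustration: build_dummy_object (T) produces the dereference of
   void_zero_node cast to T *, i.e. an INDIRECT_REF around a NOP_EXPR of
   void_zero_node, and is_dummy_object recognizes exactly that shape,
   looking through a top-level INDIRECT_REF first.  */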

/* Returns 1 iff type T is something we want to treat as a scalar type for
   the purpose of deciding whether it is trivial/POD/standard-layout.  */

bool
scalarish_type_p (const_tree t)
{
  if (t == error_mark_node)
    return 1;

  return (SCALAR_TYPE_P (t)
	  || TREE_CODE (t) == VECTOR_TYPE);
}

/* Returns true iff T requires non-trivial default initialization.  */

bool
type_has_nontrivial_default_init (const_tree t)
{
  t = strip_array_types (CONST_CAST_TREE (t));

  if (CLASS_TYPE_P (t))
    return TYPE_HAS_COMPLEX_DFLT (t);
  else
    return 0;
}

/* Returns true iff copying an object of type T (including via move
   constructor) is non-trivial.  That is, T has no non-trivial copy
   constructors and no non-trivial move constructors.  */

bool
type_has_nontrivial_copy_init (const_tree t)
{
  t = strip_array_types (CONST_CAST_TREE (t));

  if (CLASS_TYPE_P (t))
    {
      gcc_assert (COMPLETE_TYPE_P (t));
      return ((TYPE_HAS_COPY_CTOR (t)
	       && TYPE_HAS_COMPLEX_COPY_CTOR (t))
	      || TYPE_HAS_COMPLEX_MOVE_CTOR (t));
    }
  else
    return 0;
}

/* Returns 1 iff type T is a trivially copyable type, as defined in
   [basic.types] and [class].  */

bool
trivially_copyable_p (const_tree t)
{
  t = strip_array_types (CONST_CAST_TREE (t));

  if (CLASS_TYPE_P (t))
    return ((!TYPE_HAS_COPY_CTOR (t)
	     || !TYPE_HAS_COMPLEX_COPY_CTOR (t))
	    && !TYPE_HAS_COMPLEX_MOVE_CTOR (t)
	    && (!TYPE_HAS_COPY_ASSIGN (t)
		|| !TYPE_HAS_COMPLEX_COPY_ASSIGN (t))
	    && !TYPE_HAS_COMPLEX_MOVE_ASSIGN (t)
	    && TYPE_HAS_TRIVIAL_DESTRUCTOR (t));
  else
    return scalarish_type_p (t);
}

/* Returns 1 iff type T is a trivial type, as defined in [basic.types] and
   [class].  */

bool
trivial_type_p (const_tree t)
{
  t = strip_array_types (CONST_CAST_TREE (t));

  if (CLASS_TYPE_P (t))
    return (TYPE_HAS_TRIVIAL_DFLT (t)
	    && trivially_copyable_p (t));
  else
    return scalarish_type_p (t);
}

/* Returns 1 iff type T is a POD type, as defined in [basic.types].  */

bool
pod_type_p (const_tree t)
{
  /* This CONST_CAST is okay because strip_array_types returns its
     argument unmodified and we assign it to a const_tree.  */
  t = strip_array_types (CONST_CAST_TREE(t));

  if (!CLASS_TYPE_P (t))
    return scalarish_type_p (t);
  else if (cxx_dialect > cxx98)
    /* [class]/10: A POD struct is a class that is both a trivial class and a
       standard-layout class, and has no non-static data members of type
       non-POD struct, non-POD union (or array of such types).

       We don't need to check individual members because if a member is
       non-std-layout or non-trivial, the class will be too.  */
    return (std_layout_type_p (t) && trivial_type_p (t));
  else
    /* The C++98 definition of POD is different.  */
    return !CLASSTYPE_NON_LAYOUT_POD_P (t);
}

/* Returns true iff T is POD for the purpose of layout, as defined in the
   C++ ABI.  */

bool
layout_pod_type_p (const_tree t)
{
  t = strip_array_types (CONST_CAST_TREE (t));

  if (CLASS_TYPE_P (t))
    return !CLASSTYPE_NON_LAYOUT_POD_P (t);
  else
    return scalarish_type_p (t);
}

/* Returns true iff T is a standard-layout type, as defined in
   [basic.types].  */

bool
std_layout_type_p (const_tree t)
{
  t = strip_array_types (CONST_CAST_TREE (t));

  if (CLASS_TYPE_P (t))
    return !CLASSTYPE_NON_STD_LAYOUT (t);
  else
    return scalarish_type_p (t);
}

/* Nonzero iff type T is a class template implicit specialization.  */

bool
class_tmpl_impl_spec_p (const_tree t)
{
  return CLASS_TYPE_P (t) && CLASSTYPE_TEMPLATE_INSTANTIATION (t);
}

/* Returns 1 iff zero initialization of type T means actually storing
   zeros in it.  */

int
zero_init_p (const_tree t)
{
  /* This CONST_CAST is okay because strip_array_types returns its
     argument unmodified and we assign it to a const_tree.  */
  t = strip_array_types (CONST_CAST_TREE(t));

  if (t == error_mark_node)
    return 1;

  /* NULL pointers to data members are initialized with -1.  */
  if (TYPE_PTRDATAMEM_P (t))
    return 0;

  /* Classes that contain types that can't be zero-initialized, cannot
     be zero-initialized themselves.  */
  if (CLASS_TYPE_P (t) && CLASSTYPE_NON_ZERO_INIT_P (t))
    return 0;

  return 1;
}

/* Table of valid C++ attributes.  */
const struct attribute_spec cxx_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "java_interface", 0, 0, false, false, false,
    handle_java_interface_attribute, false },
  { "com_interface",  0, 0, false, false, false,
    handle_com_interface_attribute, false },
  { "init_priority",  1, 1, true,  false, false,
    handle_init_priority_attribute, false },
  { "abi_tag", 1, -1, false, false, false,
    handle_abi_tag_attribute, true },
  { NULL,	      0, 0, false, false, false, NULL, false }
};

/* Handle a "java_interface" attribute; arguments as in
   struct attribute_spec.handler.  */
static tree
handle_java_interface_attribute (tree* node,
				 tree name,
				 tree /*args*/,
				 int flags,
				 bool* no_add_attrs)
{
  if (DECL_P (*node)
      || !CLASS_TYPE_P (*node)
      || !TYPE_FOR_JAVA (*node))
    {
      error ("%qE attribute can only be applied to Java class definitions",
	     name);
      *no_add_attrs = true;
      return NULL_TREE;
    }
  if (!(flags & (int) ATTR_FLAG_TYPE_IN_PLACE))
    *node = build_variant_type_copy (*node);
  TYPE_JAVA_INTERFACE (*node) = 1;

  return NULL_TREE;
}

/* Handle a "com_interface" attribute; arguments as in
   struct attribute_spec.handler.  */
static tree
handle_com_interface_attribute (tree* node,
				tree name,
				tree /*args*/,
				int /*flags*/,
				bool* no_add_attrs)
{
  static int warned;

  *no_add_attrs = true;

  if (DECL_P (*node)
      || !CLASS_TYPE_P (*node)
      || *node != TYPE_MAIN_VARIANT (*node))
    {
      warning (OPT_Wattributes, "%qE attribute can only be applied "
	       "to class definitions", name);
      return NULL_TREE;
    }

  if (!warned++)
    warning (0, "%qE is obsolete; g++ vtables are now COM-compatible by default",
	     name);

  return NULL_TREE;
}

/* Handle an "init_priority" attribute; arguments as in
   struct attribute_spec.handler.  */
static tree
handle_init_priority_attribute (tree* node,
				tree name,
				tree args,
				int /*flags*/,
				bool* no_add_attrs)
{
  tree initp_expr = TREE_VALUE (args);
  tree decl = *node;
  tree type = TREE_TYPE (decl);
  int pri;

  STRIP_NOPS (initp_expr);
  initp_expr = default_conversion (initp_expr);

  if (!initp_expr || TREE_CODE (initp_expr) != INTEGER_CST)
    {
      error ("requested init_priority is not an integer constant");
      *no_add_attrs = true;
      return NULL_TREE;
    }

  pri = TREE_INT_CST_LOW (initp_expr);

  type = strip_array_types (type);

  if (decl == NULL_TREE
      || !VAR_P (decl)
      || !TREE_STATIC (decl)
      || DECL_EXTERNAL (decl)
      || (TREE_CODE (type) != RECORD_TYPE
	  && TREE_CODE (type) != UNION_TYPE)
      /* Static objects in functions are initialized the
	 first time control passes through that
	 function. This is not precise enough to pin down an
	 init_priority value, so don't allow it.  */
      || current_function_decl)
    {
      error ("can only use %qE attribute on file-scope definitions "
	     "of objects of class type", name);
      *no_add_attrs = true;
      return NULL_TREE;
    }

  if (pri > MAX_INIT_PRIORITY || pri <= 0)
    {
      error ("requested init_priority is out of range");
      *no_add_attrs = true;
      return NULL_TREE;
    }

  /* Check for init_priorities that are reserved for
     language and runtime support implementations.*/
  if (pri <= MAX_RESERVED_INIT_PRIORITY)
    {
      warning
	(0, "requested init_priority is reserved for internal use");
    }

  if (SUPPORTS_INIT_PRIORITY)
    {
      SET_DECL_INIT_PRIORITY (decl, pri);
      DECL_HAS_INIT_PRIORITY_P (decl) = 1;
      return NULL_TREE;
    }
  else
    {
      error ("%qE attribute is not supported on this platform", name);
      *no_add_attrs = true;
      return NULL_TREE;
    }
}
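
/* For illustration (with placeholder names): the attribute handled above
   is written as

     SomeClass obj __attribute__ ((init_priority (543)));

   and only applies to file-scope definitions of objects of class type;
   priorities at or below MAX_RESERVED_INIT_PRIORITY are reserved for the
   implementation, as diagnosed above.  */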

/* DECL is being redeclared; the old declaration had the abi tags in OLD,
   and the new one has the tags in NEW_.  Give an error if there are tags
   in NEW_ that weren't in OLD.  */

bool
check_abi_tag_redeclaration (const_tree decl, const_tree old, const_tree new_)
{
  if (old && TREE_CODE (TREE_VALUE (old)) == TREE_LIST)
    old = TREE_VALUE (old);
  if (new_ && TREE_CODE (TREE_VALUE (new_)) == TREE_LIST)
    new_ = TREE_VALUE (new_);
  bool err = false;
  for (const_tree t = new_; t; t = TREE_CHAIN (t))
    {
      tree str = TREE_VALUE (t);
      for (const_tree in = old; in; in = TREE_CHAIN (in))
	{
	  tree ostr = TREE_VALUE (in);
	  if (cp_tree_equal (str, ostr))
	    goto found;
	}
      error ("redeclaration of %qD adds abi tag %E", decl, str);
      err = true;
    found:;
    }
  if (err)
    {
      inform (DECL_SOURCE_LOCATION (decl), "previous declaration here");
      return false;
    }
  return true;
}
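
/* Illustrative example of the check above: given

     void f ();
     void f () __attribute__ ((abi_tag ("v2")));

   the second declaration reaches check_abi_tag_redeclaration with OLD
   empty and NEW_ holding "v2", so the "adds abi tag" error is issued.  */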

/* Handle an "abi_tag" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_abi_tag_attribute (tree* node, tree name, tree args,
			  int flags, bool* no_add_attrs)
{
  if (TYPE_P (*node))
    {
      if (!OVERLOAD_TYPE_P (*node))
	{
	  error ("%qE attribute applied to non-class, non-enum type %qT",
		 name, *node);
	  goto fail;
	}
      else if (!(flags & (int)ATTR_FLAG_TYPE_IN_PLACE))
	{
	  error ("%qE attribute applied to %qT after its definition",
		 name, *node);
	  goto fail;
	}

      tree attributes = TYPE_ATTRIBUTES (*node);
      tree decl = TYPE_NAME (*node);

      /* Make sure all declarations have the same abi tags.  */
      if (DECL_SOURCE_LOCATION (decl) != input_location)
	{
	  if (!check_abi_tag_redeclaration (decl,
					    lookup_attribute ("abi_tag",
							      attributes),
					    args))
	    goto fail;
	}
    }
  else
    {
      if (TREE_CODE (*node) != FUNCTION_DECL)
	{
	  error ("%qE attribute applied to non-function %qD", name, *node);
	  goto fail;
	}
      else if (DECL_LANGUAGE (*node) == lang_c)
	{
	  error ("%qE attribute applied to extern \"C\" function %qD",
		 name, *node);
	  goto fail;
	}
    }

  return NULL_TREE;

 fail:
  *no_add_attrs = true;
  return NULL_TREE;
}
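
/* Illustrative examples of uses that reach the handler above:

     struct __attribute__ ((abi_tag ("mylib_v2"))) S { };
     int f () __attribute__ ((abi_tag ("mylib_v2")));

   The tag strings are recorded in the attribute list and later become
   part of the mangled names of the tagged entities.  (The tag name
   shown here is made up for the example.)  */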

/* Return a new PTRMEM_CST of the indicated TYPE.  The MEMBER is the
   thing pointed to by the constant.  */

tree
make_ptrmem_cst (tree type, tree member)
{
  tree ptrmem_cst = make_node (PTRMEM_CST);
  TREE_TYPE (ptrmem_cst) = type;
  PTRMEM_CST_MEMBER (ptrmem_cst) = member;
  return ptrmem_cst;
}
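
/* For instance, a constant pointer-to-member expression such as &S::m
   is typically represented as a PTRMEM_CST whose PTRMEM_CST_MEMBER is
   the FIELD_DECL (or FUNCTION_DECL) for "m"; the names here are only
   illustrative.  */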

/* Build a variant of TYPE that has the indicated ATTRIBUTES.  May
   return an existing type if an appropriate type already exists.  */

tree
cp_build_type_attribute_variant (tree type, tree attributes)
{
  tree new_type;

  new_type = build_type_attribute_variant (type, attributes);
  if (TREE_CODE (new_type) == FUNCTION_TYPE
      || TREE_CODE (new_type) == METHOD_TYPE)
    {
      new_type = build_exception_variant (new_type,
					  TYPE_RAISES_EXCEPTIONS (type));
      new_type = build_ref_qualified_type (new_type,
					   type_memfn_rqual (type));
    }

  /* Making a new main variant of a class type is broken.  */
  gcc_assert (!CLASS_TYPE_P (type) || new_type == type);

  return new_type;
}

/* Return TRUE if TYPE1 and TYPE2 are identical for type hashing purposes.
   Called only after doing all language independent checks.  Only
   to check TYPE_RAISES_EXCEPTIONS for FUNCTION_TYPE, the rest is already
   compared in type_hash_eq.  */

bool
cxx_type_hash_eq (const_tree typea, const_tree typeb)
{
  gcc_assert (TREE_CODE (typea) == FUNCTION_TYPE
	      || TREE_CODE (typea) == METHOD_TYPE);

  return comp_except_specs (TYPE_RAISES_EXCEPTIONS (typea),
			    TYPE_RAISES_EXCEPTIONS (typeb), ce_exact);
}

/* Apply FUNC to all language-specific sub-trees of TP in a pre-order
   traversal.  Called from walk_tree.  */

tree
cp_walk_subtrees (tree *tp, int *walk_subtrees_p, walk_tree_fn func,
		  void *data, struct pointer_set_t *pset)
{
  enum tree_code code = TREE_CODE (*tp);
  tree result;

#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = cp_walk_tree (&(NODE), func, data, pset);	\
      if (result) goto out;				\
    }							\
  while (0)

  /* Not one of the easy cases.  We must explicitly go through the
     children.  */
  result = NULL_TREE;
  switch (code)
    {
    case DEFAULT_ARG:
    case TEMPLATE_TEMPLATE_PARM:
    case BOUND_TEMPLATE_TEMPLATE_PARM:
    case UNBOUND_CLASS_TEMPLATE:
    case TEMPLATE_PARM_INDEX:
    case TEMPLATE_TYPE_PARM:
    case TYPENAME_TYPE:
    case TYPEOF_TYPE:
    case UNDERLYING_TYPE:
      /* None of these have subtrees other than those already walked
	 above.  */
      *walk_subtrees_p = 0;
      break;

    case BASELINK:
      WALK_SUBTREE (BASELINK_FUNCTIONS (*tp));
      *walk_subtrees_p = 0;
      break;

    case PTRMEM_CST:
      WALK_SUBTREE (TREE_TYPE (*tp));
      *walk_subtrees_p = 0;
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_PURPOSE (*tp));
      break;

    case OVERLOAD:
      WALK_SUBTREE (OVL_FUNCTION (*tp));
      WALK_SUBTREE (OVL_CHAIN (*tp));
      *walk_subtrees_p = 0;
      break;

    case USING_DECL:
      WALK_SUBTREE (DECL_NAME (*tp));
      WALK_SUBTREE (USING_DECL_SCOPE (*tp));
      WALK_SUBTREE (USING_DECL_DECLS (*tp));
      *walk_subtrees_p = 0;
      break;

    case RECORD_TYPE:
      if (TYPE_PTRMEMFUNC_P (*tp))
	WALK_SUBTREE (TYPE_PTRMEMFUNC_FN_TYPE (*tp));
      break;

    case TYPE_ARGUMENT_PACK:
    case NONTYPE_ARGUMENT_PACK:
      {
        tree args = ARGUMENT_PACK_ARGS (*tp);
        int i, len = TREE_VEC_LENGTH (args);
        for (i = 0; i < len; i++)
          WALK_SUBTREE (TREE_VEC_ELT (args, i));
      }
      break;

    case TYPE_PACK_EXPANSION:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (PACK_EXPANSION_EXTRA_ARGS (*tp));
      *walk_subtrees_p = 0;
      break;
      
    case EXPR_PACK_EXPANSION:
      WALK_SUBTREE (TREE_OPERAND (*tp, 0));
      WALK_SUBTREE (PACK_EXPANSION_EXTRA_ARGS (*tp));
      *walk_subtrees_p = 0;
      break;

    case CAST_EXPR:
    case REINTERPRET_CAST_EXPR:
    case STATIC_CAST_EXPR:
    case CONST_CAST_EXPR:
    case DYNAMIC_CAST_EXPR:
    case IMPLICIT_CONV_EXPR:
      if (TREE_TYPE (*tp))
	WALK_SUBTREE (TREE_TYPE (*tp));

      {
        int i;
        for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (*tp)); ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
      }
      *walk_subtrees_p = 0;
      break;

    case TRAIT_EXPR:
      WALK_SUBTREE (TRAIT_EXPR_TYPE1 (*tp));
      WALK_SUBTREE (TRAIT_EXPR_TYPE2 (*tp));
      *walk_subtrees_p = 0;
      break;

    case DECLTYPE_TYPE:
      WALK_SUBTREE (DECLTYPE_TYPE_EXPR (*tp));
      *walk_subtrees_p = 0;
      break;
 

    default:
      return NULL_TREE;
    }

  /* We didn't find what we were looking for.  */
 out:
  return result;

#undef WALK_SUBTREE
}

/* Like save_expr, but for C++.  */

tree
cp_save_expr (tree expr)
{
  /* There is no reason to create a SAVE_EXPR within a template; if
     needed, we can create the SAVE_EXPR when instantiating the
     template.  Furthermore, the middle-end cannot handle C++-specific
     tree codes.  */
  if (processing_template_decl)
    return expr;
  return save_expr (expr);
}

/* Initialize tree.c.  */

void
init_tree (void)
{
  list_hash_table = htab_create_ggc (31, list_hash, list_hash_eq, NULL);
}

/* Returns the kind of special function that DECL (a FUNCTION_DECL)
   is.  Note that sfk_none is zero, so this function can be used as a
   predicate to test whether or not DECL is a special function.  */

special_function_kind
special_function_p (const_tree decl)
{
  /* Rather than doing all this stuff with magic names, we should
     probably have a field of type `special_function_kind' in
     DECL_LANG_SPECIFIC.  */
  if (DECL_INHERITED_CTOR_BASE (decl))
    return sfk_inheriting_constructor;
  if (DECL_COPY_CONSTRUCTOR_P (decl))
    return sfk_copy_constructor;
  if (DECL_MOVE_CONSTRUCTOR_P (decl))
    return sfk_move_constructor;
  if (DECL_CONSTRUCTOR_P (decl))
    return sfk_constructor;
  if (DECL_OVERLOADED_OPERATOR_P (decl) == NOP_EXPR)
    {
      if (copy_fn_p (decl))
	return sfk_copy_assignment;
      if (move_fn_p (decl))
	return sfk_move_assignment;
    }
  if (DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (decl))
    return sfk_destructor;
  if (DECL_COMPLETE_DESTRUCTOR_P (decl))
    return sfk_complete_destructor;
  if (DECL_BASE_DESTRUCTOR_P (decl))
    return sfk_base_destructor;
  if (DECL_DELETING_DESTRUCTOR_P (decl))
    return sfk_deleting_destructor;
  if (DECL_CONV_FN_P (decl))
    return sfk_conversion;

  return sfk_none;
}

/* Returns nonzero if TYPE is a character type, including wchar_t.  */

int
char_type_p (tree type)
{
  return (same_type_p (type, char_type_node)
	  || same_type_p (type, unsigned_char_type_node)
	  || same_type_p (type, signed_char_type_node)
	  || same_type_p (type, char16_type_node)
	  || same_type_p (type, char32_type_node)
	  || same_type_p (type, wchar_type_node));
}

/* Returns the kind of linkage associated with the indicated DECL.  The
   value returned is as specified by the language standard; it is
   independent of implementation details regarding template
   instantiation, etc.  For example, it is possible that a declaration
   to which this function assigns external linkage would not show up
   as a global symbol when you run `nm' on the resulting object file.  */

linkage_kind
decl_linkage (tree decl)
{
  /* This function doesn't attempt to calculate the linkage from first
     principles as given in [basic.link].  Instead, it makes use of
     the fact that we have already set TREE_PUBLIC appropriately, and
     then handles a few special cases.  Ideally, we would calculate
     linkage first, and then transform that into a concrete
     implementation.  */

  /* Things that don't have names have no linkage.  */
  if (!DECL_NAME (decl))
    return lk_none;

  /* Fields have no linkage.  */
  if (TREE_CODE (decl) == FIELD_DECL)
    return lk_none;

  /* Things that are TREE_PUBLIC have external linkage.  */
  if (TREE_PUBLIC (decl))
    return lk_external;

  if (TREE_CODE (decl) == NAMESPACE_DECL)
    return lk_external;

  /* Linkage of a CONST_DECL depends on the linkage of the enumeration
     type.  */
  if (TREE_CODE (decl) == CONST_DECL)
    return decl_linkage (TYPE_NAME (DECL_CONTEXT (decl)));

  /* Some things that are not TREE_PUBLIC have external linkage, too.
     For example, on targets that don't have weak symbols, we make all
     template instantiations have internal linkage (in the object
     file), but the symbols should still be treated as having external
     linkage from the point of view of the language.  */
  if (VAR_OR_FUNCTION_DECL_P (decl)
      && DECL_COMDAT (decl))
    return lk_external;

  /* Things in local scope do not have linkage, if they don't have
     TREE_PUBLIC set.  */
  if (decl_function_context (decl))
    return lk_none;

  /* Members of the anonymous namespace also have TREE_PUBLIC unset, but
     are considered to have external linkage for language purposes.  DECLs
     really meant to have internal linkage have DECL_THIS_STATIC set.  */
  if (TREE_CODE (decl) == TYPE_DECL)
    return lk_external;
  if (VAR_OR_FUNCTION_DECL_P (decl))
    {
      if (!DECL_THIS_STATIC (decl))
	return lk_external;

      /* Static data members and static member functions from classes
	 in anonymous namespace also don't have TREE_PUBLIC set.  */
      if (DECL_CLASS_CONTEXT (decl))
	return lk_external;
    }

  /* Everything else has internal linkage.  */
  return lk_internal;
}

/* Returns the storage duration of the object or reference associated with
   the indicated DECL, which should be a VAR_DECL or PARM_DECL.  */

duration_kind
decl_storage_duration (tree decl)
{
  if (TREE_CODE (decl) == PARM_DECL)
    return dk_auto;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return dk_static;
  gcc_assert (VAR_P (decl));
  if (!TREE_STATIC (decl)
      && !DECL_EXTERNAL (decl))
    return dk_auto;
  if (DECL_THREAD_LOCAL_P (decl))
    return dk_thread;
  return dk_static;
}

/* EXP is an expression that we want to pre-evaluate.  Returns (in
   *INITP) an expression that will perform the pre-evaluation.  The
   value returned by this function is a side-effect free expression
   equivalent to the pre-evaluated expression.  Callers must ensure
   that *INITP is evaluated before EXP.  */

tree
stabilize_expr (tree exp, tree* initp)
{
  tree init_expr;

  if (!TREE_SIDE_EFFECTS (exp))
    init_expr = NULL_TREE;
  else if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      init_expr = exp;
      exp = void_zero_node;
    }
  /* There are no expressions with REFERENCE_TYPE, but there can be call
     arguments with such a type; just treat it as a pointer.  */
  else if (TREE_CODE (TREE_TYPE (exp)) == REFERENCE_TYPE
	   || SCALAR_TYPE_P (TREE_TYPE (exp))
	   || !lvalue_or_rvalue_with_address_p (exp))
    {
      init_expr = get_target_expr (exp);
      exp = TARGET_EXPR_SLOT (init_expr);
    }
  else
    {
      bool xval = !real_lvalue_p (exp);
      exp = cp_build_addr_expr (exp, tf_warning_or_error);
      init_expr = get_target_expr (exp);
      exp = TARGET_EXPR_SLOT (init_expr);
      exp = cp_build_indirect_ref (exp, RO_NULL, tf_warning_or_error);
      if (xval)
	exp = move (exp);
    }
  *initp = init_expr;

  gcc_assert (!TREE_SIDE_EFFECTS (exp));
  return exp;
}
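
/* As a purely illustrative example, stabilizing the scalar expression
   "i++" stores a TARGET_EXPR that performs the increment in *INITP and
   returns the TARGET_EXPR_SLOT temporary holding its value, which can
   then be reused without re-evaluating the side effect.  */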

/* Add NEW_EXPR, an expression whose value we don't care about, after the
   similar expression ORIG.  */

tree
add_stmt_to_compound (tree orig, tree new_expr)
{
  if (!new_expr || !TREE_SIDE_EFFECTS (new_expr))
    return orig;
  if (!orig || !TREE_SIDE_EFFECTS (orig))
    return new_expr;
  return build2 (COMPOUND_EXPR, void_type_node, orig, new_expr);
}

/* Like stabilize_expr, but for a call whose arguments we want to
   pre-evaluate.  CALL is modified in place to use the pre-evaluated
   arguments, while, upon return, *INITP contains an expression to
   compute the arguments.  */

void
stabilize_call (tree call, tree *initp)
{
  tree inits = NULL_TREE;
  int i;
  int nargs = call_expr_nargs (call);

  if (call == error_mark_node || processing_template_decl)
    {
      *initp = NULL_TREE;
      return;
    }

  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  for (i = 0; i < nargs; i++)
    {
      tree init;
      CALL_EXPR_ARG (call, i) =
	stabilize_expr (CALL_EXPR_ARG (call, i), &init);
      inits = add_stmt_to_compound (inits, init);
    }

  *initp = inits;
}
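
/* For example, for a call such as f (g (), i++) the loop above replaces
   each argument with a side-effect-free equivalent and accumulates the
   argument evaluations into *INITP as a COMPOUND_EXPR.  (The call shown
   is illustrative only.)  */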

/* Like stabilize_expr, but for an AGGR_INIT_EXPR whose arguments we want
   to pre-evaluate.  CALL is modified in place to use the pre-evaluated
   arguments, while, upon return, *INITP contains an expression to
   compute the arguments.  */

static void
stabilize_aggr_init (tree call, tree *initp)
{
  tree inits = NULL_TREE;
  int i;
  int nargs = aggr_init_expr_nargs (call);

  if (call == error_mark_node)
    return;

  gcc_assert (TREE_CODE (call) == AGGR_INIT_EXPR);

  for (i = 0; i < nargs; i++)
    {
      tree init;
      AGGR_INIT_EXPR_ARG (call, i) =
	stabilize_expr (AGGR_INIT_EXPR_ARG (call, i), &init);
      inits = add_stmt_to_compound (inits, init);
    }

  *initp = inits;
}

/* Like stabilize_expr, but for an initialization.  

   If the initialization is for an object of class type, this function
   takes care not to introduce additional temporaries.

   Returns TRUE iff the expression was successfully pre-evaluated,
   i.e., if INIT is now side-effect free, except for, possibly, a
   single call to a constructor.  */

bool
stabilize_init (tree init, tree *initp)
{
  tree t = init;

  *initp = NULL_TREE;

  if (t == error_mark_node || processing_template_decl)
    return true;

  if (TREE_CODE (t) == INIT_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == TARGET_EXPR)
    t = TARGET_EXPR_INITIAL (t);

  /* If the RHS can be stabilized without breaking copy elision, stabilize
     it.  We specifically don't stabilize class prvalues here because that
     would mean an extra copy, but they might be stabilized below.  */
  if (TREE_CODE (init) == INIT_EXPR
      && TREE_CODE (t) != CONSTRUCTOR
      && TREE_CODE (t) != AGGR_INIT_EXPR
      && (SCALAR_TYPE_P (TREE_TYPE (t))
	  || lvalue_or_rvalue_with_address_p (t)))
    {
      TREE_OPERAND (init, 1) = stabilize_expr (t, initp);
      return true;
    }

  if (TREE_CODE (t) == COMPOUND_EXPR
      && TREE_CODE (init) == INIT_EXPR)
    {
      tree last = expr_last (t);
      /* Handle stabilizing the EMPTY_CLASS_EXPR pattern.  */
      if (!TREE_SIDE_EFFECTS (last))
	{
	  *initp = t;
	  TREE_OPERAND (init, 1) = last;
	  return true;
	}
    }

  if (TREE_CODE (t) == CONSTRUCTOR)
    {
      /* Aggregate initialization: stabilize each of the field
	 initializers.  */
      unsigned i;
      constructor_elt *ce;
      bool good = true;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t);
      for (i = 0; vec_safe_iterate (v, i, &ce); ++i)
	{
	  tree type = TREE_TYPE (ce->value);
	  tree subinit;
	  if (TREE_CODE (type) == REFERENCE_TYPE
	      || SCALAR_TYPE_P (type))
	    ce->value = stabilize_expr (ce->value, &subinit);
	  else if (!stabilize_init (ce->value, &subinit))
	    good = false;
	  *initp = add_stmt_to_compound (*initp, subinit);
	}
      return good;
    }

  if (TREE_CODE (t) == CALL_EXPR)
    {
      stabilize_call (t, initp);
      return true;
    }

  if (TREE_CODE (t) == AGGR_INIT_EXPR)
    {
      stabilize_aggr_init (t, initp);
      return true;
    }

  /* The initialization is being performed via a bitwise copy -- and
     the item copied may have side effects.  */
  return !TREE_SIDE_EFFECTS (init);
}
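
/* Illustrative note: for an initialization whose right-hand side is a
   plain constructor call (a CALL_EXPR or AGGR_INIT_EXPR), stabilize_init
   pre-evaluates the call's arguments via the helpers above and returns
   true; for a bitwise copy with side effects it returns false.  */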

/* Like "fold", but should be used whenever we might be processing the
   body of a template.  */

tree
fold_if_not_in_template (tree expr)
{
  /* In the body of a template, there is never any need to call
     "fold".  We will call fold later when actually instantiating the
     template.  Integral constant expressions in templates will be
     evaluated via fold_non_dependent_expr, as necessary.  */
  if (processing_template_decl)
    return expr;

  /* Fold C++ front-end specific tree codes.  */
  if (TREE_CODE (expr) == UNARY_PLUS_EXPR)
    return fold_convert (TREE_TYPE (expr), TREE_OPERAND (expr, 0));

  return fold (expr);
}

/* Returns true if a cast to TYPE may appear in an integral constant
   expression.  */

bool
cast_valid_in_integral_constant_expression_p (tree type)
{
  return (INTEGRAL_OR_ENUMERATION_TYPE_P (type)
	  || cxx_dialect >= cxx11
	  || dependent_type_p (type)
	  || type == error_mark_node);
}

/* Return true if we need to fix linkage information of DECL.  */

static bool
cp_fix_function_decl_p (tree decl)
{
  /* Skip if DECL is not externally visible.  */
  if (!TREE_PUBLIC (decl))
    return false;

  /* We need to fix DECL if it appears to be exported but with no
     function body.  Thunks do not have CFGs and we may need to
     handle them specially later.   */
  if (!gimple_has_body_p (decl)
      && !DECL_THUNK_P (decl)
      && !DECL_EXTERNAL (decl))
    {
      struct cgraph_node *node = cgraph_get_node (decl);

      /* Don't fix same_body aliases.  Although they don't have their own
	 CFG, they share it with what they alias to.  */
      if (!node || !node->alias
	  || !vec_safe_length (node->ref_list.references))
	return true;
    }

  return false;
}

/* Clean the C++ specific parts of the tree T. */

void
cp_free_lang_data (tree t)
{
  if (TREE_CODE (t) == METHOD_TYPE
      || TREE_CODE (t) == FUNCTION_TYPE)
    {
      /* Default args are not interesting anymore.  */
      tree argtypes = TYPE_ARG_TYPES (t);
      while (argtypes)
        {
	  TREE_PURPOSE (argtypes) = 0;
	  argtypes = TREE_CHAIN (argtypes);
	}
    }
  else if (TREE_CODE (t) == FUNCTION_DECL
	   && cp_fix_function_decl_p (t))
    {
      /* If T is used in this translation unit at all,  the definition
	 must exist somewhere else since we have decided to not emit it
	 in this TU.  So make it an external reference.  */
      DECL_EXTERNAL (t) = 1;
      TREE_STATIC (t) = 0;
    }
  if (TREE_CODE (t) == NAMESPACE_DECL)
    {
      /* The list of users of a namespace isn't useful for the middle-end
	 or debug generators.  */
      DECL_NAMESPACE_USERS (t) = NULL_TREE;
      /* Neither do we need the leftover chaining of namespaces
         from the binding level.  */
      DECL_CHAIN (t) = NULL_TREE;
    }
}

/* Stub for c-common.  Please keep in sync with c-decl.c.
   FIXME: If address space support is target specific, then this
   should be a C target hook.  But currently this is not possible,
   because this function is called via REGISTER_TARGET_PRAGMAS.  */
void
c_register_addr_space (const char * /*word*/, addr_space_t /*as*/)
{
}

/* Return the number of operands in T that we care about for things like
   mangling.  */

int
cp_tree_operand_length (const_tree t)
{
  enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      return 1;

    case ARRAY_REF:
      return 2;

    case EXPR_PACK_EXPANSION:
      return 1;

    default:
      return TREE_OPERAND_LENGTH (t);
    }
}

/* Implement -Wzero_as_null_pointer_constant.  Return true if the
   conditions for the warning hold, false otherwise.  */
bool
maybe_warn_zero_as_null_pointer_constant (tree expr, location_t loc)
{
  if (c_inhibit_evaluation_warnings == 0
      && !NULLPTR_TYPE_P (TREE_TYPE (expr)))
    {
      warning_at (loc, OPT_Wzero_as_null_pointer_constant,
		  "zero as null pointer constant");
      return true;
    }
  return false;
}
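
/* For instance, compiling "int *p = 0;" with -Wzero-as-null-pointer-constant
   enabled reaches this function and produces the warning above, whereas
   using nullptr does not.  */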

#if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
/* Complain that some language-specific thing hanging off a tree
   node has been accessed improperly.  */

void
lang_check_failed (const char* file, int line, const char* function)
{
  internal_error ("lang_* check: failed in %s, at %s:%d",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_TREE_CHECKING */

#include "gt-cp-tree.h"