/* Language-dependent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
   Hacked by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "obstack.h"
#include "tree.h"
#include "cp-tree.h"
#include "flags.h"
#include "rtl.h"
#include "toplev.h"
#include "ggc.h"
#include "insn-config.h"
#include "integrate.h"

static tree bot_manip PROTO((tree *, int *, void *));
static tree bot_replace PROTO((tree *, int *, void *));
static tree build_cplus_array_type_1 PROTO((tree, tree));
static void list_hash_add PROTO((int, tree));
static int list_hash PROTO((tree, tree, tree));
static tree list_hash_lookup PROTO((int, tree, tree, tree));
static void propagate_binfo_offsets PROTO((tree, tree));
static cp_lvalue_kind lvalue_p_1 PROTO((tree, int));
static tree no_linkage_helper PROTO((tree *, int *, void *));
static tree build_srcloc PROTO((char *, int));
static void mark_list_hash PROTO ((void *));
static int statement_code_p PROTO((enum tree_code));
static tree mark_local_for_remap_r PROTO((tree *, int *, void *));
static tree cp_unsave_r PROTO ((tree *, int *, void *));
static void cp_unsave PROTO((tree *));
static tree build_target_expr PROTO((tree, tree));

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* If REF is an lvalue, returns the kind of lvalue that REF is.
   Otherwise, returns clk_none.  If TREAT_CLASS_RVALUES_AS_LVALUES is
   non-zero, rvalues of class type are considered lvalues.  */

static cp_lvalue_kind
lvalue_p_1 (ref, treat_class_rvalues_as_lvalues)
     tree ref;
     int treat_class_rvalues_as_lvalues;
{
  cp_lvalue_kind op1_lvalue_kind = clk_none;
  cp_lvalue_kind op2_lvalue_kind = clk_none;

  if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
    return clk_ordinary;

  if (ref == current_class_ptr && flag_this_is_variable <= 0)
    return clk_none;

  switch (TREE_CODE (ref))
    {
      /* preincrements and predecrements are valid lvals, provided
	 what they refer to are valid lvals.  */
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case UNSAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case NOP_EXPR:
      return lvalue_p_1 (TREE_OPERAND (ref, 0),
			 treat_class_rvalues_as_lvalues);

    case COMPONENT_REF:
      op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
				    treat_class_rvalues_as_lvalues);
      if (op1_lvalue_kind 
	  /* The "field" can be a FUNCTION_DECL or an OVERLOAD in some
	     situations.  */
	  && TREE_CODE (TREE_OPERAND (ref, 1)) == FIELD_DECL
	  && DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1)))
	{
	  /* Clear the ordinary bit.  If this object was a class
	     rvalue we want to preserve that information.  */
	  op1_lvalue_kind &= ~clk_ordinary;
	  /* The lvalue is for a bit-field.  */
	  op1_lvalue_kind |= clk_bitfield;
	}
      return op1_lvalue_kind;

    case STRING_CST:
      return clk_ordinary;

    case VAR_DECL:
      if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
	  && DECL_LANG_SPECIFIC (ref)
	  && DECL_IN_AGGR_P (ref))
	return clk_none;
    case INDIRECT_REF:
    case ARRAY_REF:
    case PARM_DECL:
    case RESULT_DECL:
      if (TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
	return clk_ordinary;
      break;

      /* A currently unresolved scope ref.  */
    case SCOPE_REF:
      my_friendly_abort (103);
    case OFFSET_REF:
      if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
	return clk_ordinary;
      /* Fall through.  */
    case MAX_EXPR:
    case MIN_EXPR:
      op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
				    treat_class_rvalues_as_lvalues);
      op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
				    treat_class_rvalues_as_lvalues);
      break;

    case COND_EXPR:
      op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
				    treat_class_rvalues_as_lvalues);
      op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 2),
				    treat_class_rvalues_as_lvalues);
      break;

    case MODIFY_EXPR:
      return clk_ordinary;

    case COMPOUND_EXPR:
      return lvalue_p_1 (TREE_OPERAND (ref, 1),
			 treat_class_rvalues_as_lvalues);

    case TARGET_EXPR:
      return treat_class_rvalues_as_lvalues ? clk_class : clk_none;

    case CALL_EXPR:
    case VA_ARG_EXPR:
      return ((treat_class_rvalues_as_lvalues
	       && IS_AGGR_TYPE (TREE_TYPE (ref)))
	      ? clk_class : clk_none);

    case FUNCTION_DECL:
      /* All functions (except non-static-member functions) are
	 lvalues.  */
      return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref) 
	      ? clk_none : clk_ordinary);

    default:
      break;
    }

  /* If one operand is not an lvalue at all, then this expression is
     not an lvalue.  */
  if (!op1_lvalue_kind || !op2_lvalue_kind)
    return clk_none;

  /* Otherwise, it's an lvalue, and it has all the odd properties
     contributed by either operand.  */
  op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind;
  /* It's not an ordinary lvalue if it involves either a bit-field or
     a class rvalue.  */
  if ((op1_lvalue_kind & ~clk_ordinary) != clk_none)
    op1_lvalue_kind &= ~clk_ordinary;
  return op1_lvalue_kind;
}

/* If REF is an lvalue, returns the kind of lvalue that REF is.
   Otherwise, returns clk_none.  Lvalues can be assigned, unless they
   have TREE_READONLY, or unless they are FUNCTION_DECLs.  Lvalues can
   have their address taken, unless they have DECL_REGISTER.  */

cp_lvalue_kind
real_lvalue_p (ref)
     tree ref;
{
  return lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/0);
}

/* This differs from real_lvalue_p in that class rvalues are
   considered lvalues.  */

int
lvalue_p (ref)
     tree ref;
{
  return 
    (lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/1) != clk_none);
}

/* Return nonzero if REF is an lvalue valid for this language;
   otherwise, print an error message and return zero.  */

int
lvalue_or_else (ref, string)
     tree ref;
     const char *string;
{
  int win = lvalue_p (ref);
  if (! win)
    error ("non-lvalue in %s", string);
  return win;
}
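
/* Illustrative sketch (not part of the original source): the only
   difference between real_lvalue_p and lvalue_p above is the treatment
   of class rvalues.  The helper below is hypothetical and only shows
   how the two predicates relate for a CALL_EXPR of class type.  */
#if 0
static int
class_rvalue_example (call)
     tree call;			/* assumed: a CALL_EXPR of IS_AGGR_TYPE type */
{
  /* real_lvalue_p yields clk_none here, while lvalue_p is non-zero,
     because lvalue_p_1 is entered with treat_class_rvalues_as_lvalues
     set to 1 and returns clk_class for the CALL_EXPR.  */
  return real_lvalue_p (call) == clk_none && lvalue_p (call);
}
#endif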

/* Build a TARGET_EXPR, initializing the DECL with the VALUE.  */

static tree
build_target_expr (decl, value)
     tree decl;
     tree value;
{
  tree t;

  t = build (TARGET_EXPR, TREE_TYPE (decl), decl, value, 
	     maybe_build_cleanup (decl), NULL_TREE);
  /* We always set TREE_SIDE_EFFECTS so that expand_expr does not
     ignore the TARGET_EXPR.  If there really turn out to be no
     side-effects, then the optimizer should be able to get rid of
     whatever code is generated anyhow.  */
  TREE_SIDE_EFFECTS (t) = 1;

  return t;
}

/* INIT is a CALL_EXPR which needs info about its target.
   TYPE is the type that this initialization should appear to have.

   Build an encapsulation of the initialization to perform
   and return it so that it can be processed by language-independent
   and language-specific expression expanders.  */

tree
build_cplus_new (type, init)
     tree type;
     tree init;
{
  tree fn;
  tree slot;
  tree rval;

  /* Make sure that we're not trying to create an instance of an
     abstract class.  */
  abstract_virtuals_error (NULL_TREE, type);

  if (TREE_CODE (init) != CALL_EXPR && TREE_CODE (init) != AGGR_INIT_EXPR)
    return convert (type, init);

  slot = build (VAR_DECL, type);
  DECL_ARTIFICIAL (slot) = 1;
  DECL_CONTEXT (slot) = current_function_decl;
  layout_decl (slot, 0);

  /* We split the CALL_EXPR into its function and its arguments here.
     Then, in expand_expr, we put them back together.  The reason for
     this is that this expression might be a default argument
     expression.  In that case, we need a new temporary every time the
     expression is used.  That's what break_out_target_exprs does; it
     replaces every AGGR_INIT_EXPR with a copy that uses a fresh
     temporary slot.  Then, expand_expr builds up a call-expression
     using the new slot.  */
  fn = TREE_OPERAND (init, 0);
  rval = build (AGGR_INIT_EXPR, type, fn, TREE_OPERAND (init, 1), slot);
  TREE_SIDE_EFFECTS (rval) = 1;
  AGGR_INIT_VIA_CTOR_P (rval) 
    = (TREE_CODE (fn) == ADDR_EXPR
       && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
       && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0)));
  rval = build_target_expr (slot, rval);

  return rval;
}
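
/* Illustrative sketch (not part of the original source): a typical
   caller hands build_cplus_new a constructor CALL_EXPR and gets back a
   TARGET_EXPR wrapping the AGGR_INIT_EXPR built above.  The helper
   name is hypothetical.  */
#if 0
static tree
wrap_ctor_call_example (type, call)
     tree type;			/* the class type being constructed */
     tree call;			/* assumed: a CALL_EXPR invoking a constructor */
{
  tree init = build_cplus_new (type, call);
  /* TREE_CODE (init) is TARGET_EXPR; operand 0 is the temporary
     VAR_DECL and operand 1 is the AGGR_INIT_EXPR.  */
  return init;
}
#endif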

/* Build a TARGET_EXPR using INIT to initialize a new temporary of the
   indicated TYPE.  */

tree
build_target_expr_with_type (init, type)
     tree init;
     tree type;
{
  tree slot;
  tree rval;

  slot = build (VAR_DECL, type);
  DECL_ARTIFICIAL (slot) = 1;
  DECL_CONTEXT (slot) = current_function_decl;
  layout_decl (slot, 0);
  rval = build_target_expr (slot, init);

  return rval;
}

/* Like build_target_expr_with_type, but use the type of INIT.  */

tree
get_target_expr (init)
     tree init;
{
  return build_target_expr_with_type (init, TREE_TYPE (init));
}

/* Recursively search EXP for CALL_EXPRs that need cleanups and replace
   these CALL_EXPRs with tree nodes that will perform the cleanups.  */

tree
break_out_cleanups (exp)
     tree exp;
{
  tree tmp = exp;

  if (TREE_CODE (tmp) == CALL_EXPR
      && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
    return build_cplus_new (TREE_TYPE (tmp), tmp);

  while (TREE_CODE (tmp) == NOP_EXPR
	 || TREE_CODE (tmp) == CONVERT_EXPR
	 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
	  && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
	{
	  TREE_OPERAND (tmp, 0)
	    = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
			       TREE_OPERAND (tmp, 0));
	  break;
	}
      else
	tmp = TREE_OPERAND (tmp, 0);
    }
  return exp;
}

/* Recursively perform a preorder search of EXP for CALL_EXPRs, making
   copies where they are found.  Returns a deep copy of all nodes transitively
   containing CALL_EXPRs.  */

tree
break_out_calls (exp)
     tree exp;
{
  register tree t1, t2 = NULL_TREE;
  register enum tree_code code;
  register int changed = 0;
  register int i;

  if (exp == NULL_TREE)
    return exp;

  code = TREE_CODE (exp);

  if (code == CALL_EXPR)
    return copy_node (exp);

  /* Don't try to defeat a save_expr, as it should only be done once.  */
    if (code == SAVE_EXPR)
       return exp;

  switch (TREE_CODE_CLASS (code))
    {
    default:
      abort ();

    case 'c':  /* a constant */
    case 't':  /* a type node */
    case 'x':  /* something random, like an identifier or an ERROR_MARK.  */
      return exp;

    case 'd':  /* A decl node */
#if 0                               /* This is bogus.  jason 9/21/94 */

      t1 = break_out_calls (DECL_INITIAL (exp));
      if (t1 != DECL_INITIAL (exp))
	{
	  exp = copy_node (exp);
	  DECL_INITIAL (exp) = t1;
	}
#endif
      return exp;

    case 'b':  /* A block node */
      {
	/* Don't know how to handle these correctly yet.   Must do a
	   break_out_calls on all DECL_INITIAL values for local variables,
	   and also break_out_calls on all sub-blocks and sub-statements.  */
	abort ();
      }
      return exp;

    case 'e':  /* an expression */
    case 'r':  /* a reference */
    case 's':  /* an expression with side effects */
      for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
	{
	  t1 = break_out_calls (TREE_OPERAND (exp, i));
	  if (t1 != TREE_OPERAND (exp, i))
	    {
	      exp = copy_node (exp);
	      TREE_OPERAND (exp, i) = t1;
	    }
	}
      return exp;

    case '<':  /* a comparison expression */
    case '2':  /* a binary arithmetic expression */
      t2 = break_out_calls (TREE_OPERAND (exp, 1));
      if (t2 != TREE_OPERAND (exp, 1))
	changed = 1;
    case '1':  /* a unary arithmetic expression */
      t1 = break_out_calls (TREE_OPERAND (exp, 0));
      if (t1 != TREE_OPERAND (exp, 0))
	changed = 1;
      if (changed)
	{
	  if (tree_code_length[(int) code] == 1)
	    return build1 (code, TREE_TYPE (exp), t1);
	  else
	    return build (code, TREE_TYPE (exp), t1, t2);
	}
      return exp;
    }

}

extern struct obstack permanent_obstack;

/* Here is how primitive or already-canonicalized types' hash
   codes are made.  MUST BE CONSISTENT WITH tree.c !!! */
#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)

/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments are described by ARGTYPES and whose values
   are described by RETTYPE.  If each type exists already, reuse it.  */

tree
build_cplus_method_type (basetype, rettype, argtypes)
     tree basetype, rettype, argtypes;
{
  register tree t;
  tree ptype;
  int hashcode;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  Make
     sure that the new argument list is allocated on the same obstack
     as the type.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;
  TREE_SIDE_EFFECTS (argtypes) = 1;  /* Mark first argtype as "artificial".  */

  /* If we already have such a type, use the old one and free this one.
     Note that it also frees up the above cons cell if found.  */
  hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) +
    type_hash_list (argtypes);

  t = type_hash_canon (hashcode, t);

  if (TYPE_SIZE (t) == 0)
    layout_type (t);

  return t;
}

static tree
build_cplus_array_type_1 (elt_type, index_type)
     tree elt_type;
     tree index_type;
{
  tree t;

  if (elt_type == error_mark_node || index_type == error_mark_node)
    return error_mark_node;

  if (processing_template_decl 
      || uses_template_parms (elt_type) 
      || uses_template_parms (index_type))
    {
      t = make_node (ARRAY_TYPE);
      TREE_TYPE (t) = elt_type;
      TYPE_DOMAIN (t) = index_type;
    }
  else
    t = build_array_type (elt_type, index_type);

  /* Push these needs up so that initialization takes place
     more easily.  */
  TYPE_NEEDS_CONSTRUCTING (t) 
    = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
  TYPE_NEEDS_DESTRUCTOR (t) 
    = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
  return t;
}

tree
build_cplus_array_type (elt_type, index_type)
     tree elt_type;
     tree index_type;
{
  tree t;
  int type_quals = CP_TYPE_QUALS (elt_type);

  elt_type = TYPE_MAIN_VARIANT (elt_type);

  t = build_cplus_array_type_1 (elt_type, index_type);

  if (type_quals != TYPE_UNQUALIFIED)
    t = cp_build_qualified_type (t, type_quals);

  return t;
}

/* Make a variant of TYPE, qualified with the TYPE_QUALS.  Handles
   arrays correctly.  In particular, if TYPE is an array of T's, and
   TYPE_QUALS is non-empty, returns an array of qualified T's.  If
   an attempt is made to qualify a type illegally, and COMPLAIN is
   non-zero, an error is issued.  If COMPLAIN is zero, error_mark_node
   is returned.  */

tree
cp_build_qualified_type_real (type, type_quals, complain)
     tree type;
     int type_quals;
     int complain;
{
  tree result;

  if (type == error_mark_node)
    return type;

  if (type_quals == TYPE_QUALS (type))
    return type;

  /* A restrict-qualified pointer type must be a pointer (or reference)
     to object or incomplete type.  */
  if ((type_quals & TYPE_QUAL_RESTRICT)
      && TREE_CODE (type) != TEMPLATE_TYPE_PARM
      && (!POINTER_TYPE_P (type)
	  || TYPE_PTRMEM_P (type)
	  || TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE))
    {
      if (complain)
	cp_error ("`%T' cannot be `restrict'-qualified", type);
      else
	return error_mark_node;

      type_quals &= ~TYPE_QUAL_RESTRICT;
    }

570 571 572
  if (type_quals != TYPE_UNQUALIFIED
      && TREE_CODE (type) == FUNCTION_TYPE)
    {
573 574 575 576
      if (complain)
	cp_error ("`%T' cannot be `const'-, `volatile'-, or `restrict'-qualified", type);
      else
	return error_mark_node;
      type_quals = TYPE_UNQUALIFIED;
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* In C++, the qualification really applies to the array element
	 type.  Obtain the appropriately qualified element type.  */
      tree t;
      tree element_type 
	= cp_build_qualified_type_real (TREE_TYPE (type), 
					type_quals,
					complain);

      if (element_type == error_mark_node)
	return error_mark_node;

      /* See if we already have an identically qualified type.  */
      for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	if (CP_TYPE_QUALS (t) == type_quals)
	  break;

      /* If we didn't already have it, create it now.  */
      if (!t)
	{
	  /* Make a new array type, just like the old one, but with the
	     appropriately qualified element type.  */
	  t = build_type_copy (type);
	  TREE_TYPE (t) = element_type;
	}

      /* Even if we already had this variant, we update
	 TYPE_NEEDS_CONSTRUCTING and TYPE_NEEDS_DESTRUCTOR in case
	 they changed since the variant was originally created.  
	 
	 This seems hokey; if there is some way to use a previous
	 variant *without* coming through here,
	 TYPE_NEEDS_CONSTRUCTING will never be updated.  */
      TYPE_NEEDS_CONSTRUCTING (t) 
	= TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type));
      TYPE_NEEDS_DESTRUCTOR (t) 
	= TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type));
      return t;
    }
  else if (TYPE_PTRMEMFUNC_P (type))
    {
      /* For a pointer-to-member type, we can't just return a
	 cv-qualified version of the RECORD_TYPE.  If we do, we
	 haven't changed the field that contains the actual pointer to
	 a method, and so TYPE_PTRMEMFUNC_FN_TYPE will be wrong.  */
      tree t;

      t = TYPE_PTRMEMFUNC_FN_TYPE (type);
      t = cp_build_qualified_type_real (t, type_quals, complain);
      return build_ptrmemfunc_type (t);
    }

  /* Retrieve (or create) the appropriately qualified variant.  */
  result = build_qualified_type (type, type_quals);

  /* If this was a pointer-to-method type, and we just made a copy,
     then we need to clear the cached associated
     pointer-to-member-function type; it is not valid for the new
     type.  */
  if (result != type 
      && TREE_CODE (type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)
    TYPE_SET_PTRMEMFUNC_TYPE (result, NULL_TREE);

  return result;
}
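
/* Illustrative sketch (not part of the original source): for an array
   type the qualifiers are pushed down to the element type, so
   qualifying "int []" with TYPE_QUAL_CONST yields an array of
   "const int".  The helper name is hypothetical.  */
#if 0
static tree
const_int_array_example ()
{
  tree a = build_cplus_array_type (integer_type_node, NULL_TREE);
  return cp_build_qualified_type (a, TYPE_QUAL_CONST);
}
#endif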

/* Returns the canonical version of TYPE.  In other words, if TYPE is
   a typedef, returns the underlying type.  The cv-qualification of
   the type returned matches the type input; they will always be
   compatible types.  */

tree
canonical_type_variant (t)
     tree t;
{
  return cp_build_qualified_type (TYPE_MAIN_VARIANT (t), CP_TYPE_QUALS (t));
}

/* Add OFFSET to all base types of T.

   OFFSET, which is a type offset, is number of bytes.

   Note that we don't have to worry about having two paths to the
   same base type, since this type owns its association list.  */

static void
propagate_binfo_offsets (binfo, offset)
     tree binfo;
     tree offset;
{
  tree binfos = BINFO_BASETYPES (binfo);
  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  if (flag_new_abi)
    {
      for (i = 0; i < n_baselinks; ++i)
	{
	  tree base_binfo;

	  /* Figure out which base we're looking at.  */
	  base_binfo = TREE_VEC_ELT (binfos, i);

	  /* Skip virtual bases.  Their BINFO_OFFSET doesn't matter
	     since they are always reached by using offsets looked up
	     at run-time.  */
	  if (TREE_VIA_VIRTUAL (base_binfo))
	    continue;

	  /* Whatever offset this class used to have in its immediate
	     derived class, it is now at OFFSET more bytes in its
	     final derived class, since the immediate derived class is
	     already at the indicated OFFSET.  */
	  BINFO_OFFSET (base_binfo)
	    = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);

	  propagate_binfo_offsets (base_binfo, offset);
	}
    }
  else
    {
      /* This algorithm, used for the old ABI, is neither simple, nor
	 general.  For example, it mishandles the case of:
       
           struct A;
	   struct B : public A;
	   struct C : public B;
	   
	 if B is at offset zero in C, but A is not in offset zero in
	 B.  In that case, it sets the BINFO_OFFSET for A to zero.
	 (This situation arises in the new ABI if B has virtual
	 functions, but A does not.)  Rather than change this
	 algorithm, and risking breaking the old ABI, it is preserved
	 here.  */
      for (i = 0; i < n_baselinks; /* note increment is done in the
				      loop.  */)
	{
	  tree base_binfo = TREE_VEC_ELT (binfos, i);

	  if (TREE_VIA_VIRTUAL (base_binfo))
	    i += 1;
	  else
	    {
	      int j;
	      tree delta = NULL_TREE;

	      for (j = i+1; j < n_baselinks; j++)
		if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
		  {
		    /* The next basetype offset must take into account
		       the space between the classes, not just the
		       size of each class.  */
		    delta = size_binop (MINUS_EXPR,
					BINFO_OFFSET (TREE_VEC_ELT (binfos, 
								    j)),
					BINFO_OFFSET (base_binfo));
		    break;
		  }

	      BINFO_OFFSET (base_binfo) = offset;

	      propagate_binfo_offsets (base_binfo, offset);

	      /* Go to our next class that counts for offset
                 propagation.  */
	      i = j;
	      if (i < n_baselinks)
		offset = size_binop (PLUS_EXPR, offset, delta);
	    }
	}
    }
}

/* Makes new binfos for the indirect bases under BINFO, and updates
   BINFO_OFFSET for them and their bases.  */

void
unshare_base_binfos (binfo)
     tree binfo;
{
  tree binfos = BINFO_BASETYPES (binfo);
  tree new_binfo;
  int j;

  if (binfos == NULL_TREE)
    return;

  /* Now unshare the structure beneath BINFO.  */
  for (j = TREE_VEC_LENGTH (binfos)-1;
       j >= 0; j--)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, j);
      new_binfo = TREE_VEC_ELT (binfos, j)
	= make_binfo (BINFO_OFFSET (base_binfo),
		      base_binfo,
		      BINFO_VTABLE (base_binfo),
		      BINFO_VIRTUALS (base_binfo));
      TREE_VIA_PUBLIC (new_binfo) = TREE_VIA_PUBLIC (base_binfo);
      TREE_VIA_PROTECTED (new_binfo) = TREE_VIA_PROTECTED (base_binfo);
      TREE_VIA_VIRTUAL (new_binfo) = TREE_VIA_VIRTUAL (base_binfo);
      BINFO_INHERITANCE_CHAIN (new_binfo) = binfo;
      unshare_base_binfos (new_binfo);
    }
}

/* Finish the work of layout_record, now taking virtual bases into account.
   Also compute the actual offsets that our base classes will have.
   This must be performed after the fields are laid out, since virtual
   baseclasses must be laid out at the end of the record.

   Returns the maximum number of virtual functions any of the
   baseclasses provide.  */

int
layout_basetypes (rec, max)
     tree rec;
     int max;
{
  tree binfos = TYPE_BINFO_BASETYPES (rec);
  int i, n_baseclasses = CLASSTYPE_N_BASECLASSES (rec);
  tree vbase_types;
  tree *field;

  unsigned int record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
  unsigned int desired_align;

  /* Record size so far is CONST_SIZE bits, where CONST_SIZE is an integer.  */
  register unsigned int const_size = 0;
  unsigned int nonvirtual_const_size;

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (rec))
    record_align = MAX (record_align, STRUCTURE_SIZE_BOUNDARY);
#endif

816 817 818 819
  /* Get all the virtual base types that this type uses.  The
     TREE_VALUE slot holds the virtual baseclass type.  Note that
     get_vbase_types makes copies of the virtual base BINFOs, so that
     the vbase_types are unshared.  */
  vbase_types = CLASSTYPE_VBASECLASSES (rec);

  my_friendly_assert (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST, 19970302);
  const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));

  nonvirtual_const_size = const_size;

  while (vbase_types)
    {
      tree basetype = BINFO_TYPE (vbase_types);
      tree offset;

      desired_align = TYPE_ALIGN (basetype);
      record_align = MAX (record_align, desired_align);

      if (const_size == 0)
	offset = integer_zero_node;
      else
	{
	  /* Give each virtual base type the alignment it wants.  */
	  const_size = CEIL (const_size, desired_align) * desired_align;
	  offset = size_int (CEIL (const_size, BITS_PER_UNIT));
	}

      if (CLASSTYPE_VSIZE (basetype) > max)
	max = CLASSTYPE_VSIZE (basetype);
      BINFO_OFFSET (vbase_types) = offset;

      /* Every virtual baseclass takes at least a UNIT, so that we can
	 take its address and get something different for each base.  */
      const_size += MAX (BITS_PER_UNIT,
			 TREE_INT_CST_LOW (CLASSTYPE_SIZE (basetype)));

      vbase_types = TREE_CHAIN (vbase_types);
    }

  if (const_size)
    {
      /* Because a virtual base might take a single byte above,
	 we have to re-adjust the total size to make sure it is
	 a multiple of the alignment.  */
      /* Give the whole object the alignment it wants.  */
      const_size = CEIL (const_size, record_align) * record_align;
    }

  /* Set the alignment in the complete type.  We don't set CLASSTYPE_ALIGN
   here, as that is for this class, without any virtual base classes.  */
  TYPE_ALIGN (rec) = record_align;
  if (const_size != nonvirtual_const_size)
    {
      TYPE_SIZE (rec) = size_int (const_size);
      TYPE_SIZE_UNIT (rec) = size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (rec),
                                         size_int (BITS_PER_UNIT));
    }

  /* Now propagate offset information throughout the lattice.
     Simultaneously, remove the temporary FIELD_DECLS we created in
     build_base_fields to refer to base types.  */
  field = &TYPE_FIELDS (rec);
  if (TYPE_VFIELD (rec) == *field)
    {
      /* If this class did not have a primary base, we create a
	 virtual function table pointer.  It will be the first thing
	 in the class, under the new ABI.  Skip it; the base fields
	 will follow it.  */
      my_friendly_assert (flag_new_abi 
			  && !CLASSTYPE_HAS_PRIMARY_BASE_P (rec),
			  19991218);
      field = &TREE_CHAIN (*field);
    }
    
  for (i = 0; i < n_baseclasses; i++)
    {
      register tree base_binfo = TREE_VEC_ELT (binfos, i);
      register tree basetype = BINFO_TYPE (base_binfo);

      if (TREE_VIA_VIRTUAL (base_binfo))
	continue;

      my_friendly_assert (TREE_TYPE (*field) == basetype, 23897);

      if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
	cp_warning ("direct base `%T' inaccessible in `%T' due to ambiguity",
		    basetype, rec);

      BINFO_OFFSET (base_binfo)
	= size_int (CEIL (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (*field)),
			  BITS_PER_UNIT));
      propagate_binfo_offsets (base_binfo, BINFO_OFFSET (base_binfo));

      /* Remove this field.  */
      *field = TREE_CHAIN (*field);
    }

  for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
       vbase_types = TREE_CHAIN (vbase_types))
    {
      BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
      unshare_base_binfos (vbase_types);
      propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));

      if (extra_warnings)
	{
	  tree basetype = BINFO_TYPE (vbase_types);
	  if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
	    cp_warning ("virtual base `%T' inaccessible in `%T' due to ambiguity",
			basetype, rec);
	}
    }

  return max;
}


/* Hashing of lists so that we don't make duplicates.
   The entry point is `hash_tree_cons'.  */

/* Each hash table slot is a bucket containing a chain
   of these structures.  */

struct list_hash
{
  struct list_hash *next;	/* Next structure in the bucket.  */
  int hashcode;			/* Hash code of this list.  */
  tree list;			/* The list recorded here.  */
};

/* Now here is the hash table.  When recording a list, it is added
   to the slot whose index is the hash code mod the table size.
   Note that the hash table is used for several kinds of lists.
   While all these live in the same table, they are completely independent,
   and the hash code is computed differently for each of these.  */

#define TYPE_HASH_SIZE 59
static struct list_hash *list_hash_table[TYPE_HASH_SIZE];

/* Compute a hash code for a list (chain of TREE_LIST nodes
   with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
   TREE_COMMON slots), by adding the hash codes of the individual entries.  */

static int
list_hash (purpose, value, chain)
     tree purpose, value, chain;
{
  register int hashcode = 0;

  if (chain)
    hashcode += TYPE_HASH (chain);

  if (value)
    hashcode += TYPE_HASH (value);
  else
    hashcode += 1007;
  if (purpose)
    hashcode += TYPE_HASH (purpose);
  else
    hashcode += 1009;
  return hashcode;
}

/* Look in the type hash table for a type isomorphic to TYPE.
   If one is found, return it.  Otherwise return 0.  */

static tree
list_hash_lookup (hashcode, purpose, value, chain)
     int hashcode;
     tree purpose, value, chain;
{
  register struct list_hash *h;

  for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
    if (h->hashcode == hashcode
	&& TREE_PURPOSE (h->list) == purpose
	&& TREE_VALUE (h->list) == value
	&& TREE_CHAIN (h->list) == chain)
      return h->list;
  return 0;
}

/* Add an entry to the list-hash-table
   for a list TYPE whose hash code is HASHCODE.  */

static void
list_hash_add (hashcode, list)
     int hashcode;
     tree list;
{
  register struct list_hash *h;

  h = (struct list_hash *) obstack_alloc (&permanent_obstack, sizeof (struct list_hash));
  h->hashcode = hashcode;
  h->list = list;
  h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
  list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
}

/* Given list components PURPOSE, VALUE, and CHAIN, return the canonical
   object for an identical list if one already exists.  Otherwise, build a
   new one, and record it as the canonical object.  */

/* Set to 1 to debug without canonicalization.  Never set by program.  */

static int debug_no_list_hash = 0;

tree
hash_tree_cons (purpose, value, chain)
     tree purpose, value, chain;
{
  tree t;
  int hashcode = 0;

  if (! debug_no_list_hash)
    {
      hashcode = list_hash (purpose, value, chain);
      t = list_hash_lookup (hashcode, purpose, value, chain);
      if (t)
	return t;
    }

  t = tree_cons (purpose, value, chain);

  /* If this is a new list, record it for later reuse.  */
  if (! debug_no_list_hash)
    list_hash_add (hashcode, t);

  return t;
}

/* Constructor for hashed lists.  */

tree
hash_tree_chain (value, chain)
     tree value, chain;
{
  return hash_tree_cons (NULL_TREE, value, chain);
}

/* Similar, but used for concatenating two lists.  */

tree
hash_chainon (list1, list2)
     tree list1, list2;
{
  if (list2 == 0)
    return list1;
  if (list1 == 0)
    return list2;
  if (TREE_CHAIN (list1) == NULL_TREE)
    return hash_tree_chain (TREE_VALUE (list1), list2);
  return hash_tree_chain (TREE_VALUE (list1),
			  hash_chainon (TREE_CHAIN (list1), list2));
}
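
/* Illustrative sketch (not part of the original source): because the
   lists built here are canonicalized through the hash table, building
   the same (purpose, value, chain) triple twice yields the identical
   TREE_LIST node, so pointer comparison suffices.  The helper name is
   hypothetical.  */
#if 0
static int
hashed_list_sharing_example ()
{
  tree l1 = hash_tree_chain (integer_type_node, NULL_TREE);
  tree l2 = hash_tree_chain (integer_type_node, NULL_TREE);
  return l1 == l2;		/* expected to be non-zero */
}
#endif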

/* Build an association between TYPE and some parameters:

   OFFSET is the offset added to `this' to convert it to a pointer
   of type `TYPE *'

Mike Stump committed
1078 1079 1080
   BINFO is the base binfo to use, if we are deriving from one.  This
   is necessary, as we want specialized parent binfos from base
   classes, so that the VTABLE_NAMEs of bases are for the most derived
Jeff Law committed
1081
   type, instead of the simple type.
Mike Stump committed
1082

Mike Stump committed
1083 1084 1085
   VTABLE is the virtual function table with which to initialize
   sub-objects of type TYPE.

1086
   VIRTUALS are the virtual functions sitting in VTABLE.  */
Mike Stump committed
1087 1088

tree
1089
make_binfo (offset, binfo, vtable, virtuals)
Mike Stump committed
1090
     tree offset, binfo;
Mike Stump committed
1091 1092
     tree vtable, virtuals;
{
1093
  tree new_binfo = make_tree_vec (7);
Mike Stump committed
1094
  tree type;
Mike Stump committed
1095

Mike Stump committed
1096 1097 1098 1099 1100
  if (TREE_CODE (binfo) == TREE_VEC)
    type = BINFO_TYPE (binfo);
  else
    {
      type = binfo;
1101
      binfo = CLASS_TYPE_P (type) ? TYPE_BINFO (binfo) : NULL_TREE;
Mike Stump committed
1102
    }
Mike Stump committed
1103

Mike Stump committed
1104 1105 1106 1107 1108
  TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
  BINFO_OFFSET (new_binfo) = offset;
  BINFO_VTABLE (new_binfo) = vtable;
  BINFO_VIRTUALS (new_binfo) = virtuals;
  BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
Mike Stump committed
1109

Mike Stump committed
1110 1111 1112
  if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
    BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));      
  return new_binfo;
Mike Stump committed
1113 1114 1115 1116 1117 1118 1119 1120 1121 1122
}

/* Return the binfo value for ELEM in TYPE.  */

tree
binfo_value (elem, type)
     tree elem;
     tree type;
{
  if (get_base_distance (elem, type, 0, (tree *)0) == -2)
    compiler_error ("base class `%s' ambiguous in binfo_value",
		    TYPE_NAME_STRING (elem));
  if (elem == type)
    return TYPE_BINFO (type);
  if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
    return type;
  return get_binfo (elem, type, 0);
}

/* Return a reversed copy of the BINFO-chain given by PATH.  (If the 
   BINFO_INHERITANCE_CHAIN points from base classes to derived
   classes, it will instead point from derived classes to base
1135
   classes.)  Returns the first node in the reversed chain.  */
1136

Mike Stump committed
1137
tree
1138
reverse_path (path)
Mike Stump committed
1139 1140
     tree path;
{
1141 1142
  register tree prev = NULL_TREE, cur;
  for (cur = path; cur; cur = BINFO_INHERITANCE_CHAIN (cur))
Mike Stump committed
1143
    {
1144 1145 1146
      tree r = copy_node (cur);
      BINFO_INHERITANCE_CHAIN (r) = prev;
      prev = r;
Mike Stump committed
1147 1148 1149 1150 1151 1152 1153 1154
    }
  return prev;
}

void
debug_binfo (elem)
     tree elem;
{
Mike Stump committed
1155
  unsigned HOST_WIDE_INT n;
Mike Stump committed
1156 1157
  tree virtuals;

1158
  fprintf (stderr, "type \"%s\"; offset = %ld\n",
Mike Stump committed
1159
	   TYPE_NAME_STRING (BINFO_TYPE (elem)),
1160
	   (long) TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
Mike Stump committed
1161 1162 1163 1164 1165 1166 1167
  fprintf (stderr, "vtable type:\n");
  debug_tree (BINFO_TYPE (elem));
  if (BINFO_VTABLE (elem))
    fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
  else
    fprintf (stderr, "no vtable decl yet\n");
  fprintf (stderr, "virtuals:\n");
1168
  virtuals = skip_rtti_stuff (elem, BINFO_TYPE (elem), &n);
Mike Stump committed
1169

Mike Stump committed
1170 1171
  while (virtuals)
    {
1172
      tree fndecl = TREE_VALUE (virtuals);
1173
      fprintf (stderr, "%s [%ld =? %ld]\n",
Mike Stump committed
1174
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1175
	       (long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
Mike Stump committed
1176
      ++n;
Mike Stump committed
1177 1178 1179 1180 1181 1182 1183 1184
      virtuals = TREE_CHAIN (virtuals);
    }
}

int
count_functions (t)
     tree t;
{
1185
  int i;
Mike Stump committed
1186 1187
  if (TREE_CODE (t) == FUNCTION_DECL)
    return 1;
1188 1189 1190 1191 1192 1193
  else if (TREE_CODE (t) == OVERLOAD)
    {
      for (i=0; t; t = OVL_CHAIN (t))
	i++;
      return i;
    }

  my_friendly_abort (359);
  return 0;
}

int
is_overloaded_fn (x)
     tree x;
{
1203
  /* A baselink is also considered an overloaded function.  */
1204 1205
  if (TREE_CODE (x) == OFFSET_REF)
    x = TREE_OPERAND (x, 1);
1206 1207
  if (BASELINK_P (x))
    x = TREE_VALUE (x);
1208 1209 1210
  return (TREE_CODE (x) == FUNCTION_DECL
	  || TREE_CODE (x) == TEMPLATE_ID_EXPR
	  || DECL_FUNCTION_TEMPLATE_P (x)
1211
	  || TREE_CODE (x) == OVERLOAD);
Mike Stump committed
1212 1213
}

Mike Stump committed
1214 1215 1216 1217
int
really_overloaded_fn (x)
     tree x;
{     
1218
  /* A baselink is also considered an overloaded function.  */
1219 1220
  if (TREE_CODE (x) == OFFSET_REF)
    x = TREE_OPERAND (x, 1);
1221
  if (BASELINK_P (x))
1222 1223 1224 1225
    x = TREE_VALUE (x);
  return (TREE_CODE (x) == OVERLOAD 
	  && (TREE_CHAIN (x) != NULL_TREE
	      || DECL_FUNCTION_TEMPLATE_P (OVL_FUNCTION (x))));
Mike Stump committed
1226 1227
}

Mike Stump committed
1228 1229 1230 1231
tree
get_first_fn (from)
     tree from;
{
1232
  my_friendly_assert (is_overloaded_fn (from), 9);
1233
  /* A baselink is also considered an overloaded function. */
1234
  if (BASELINK_P (from))
1235 1236 1237
    from = TREE_VALUE (from);
  return OVL_CURRENT (from);
}
Mike Stump committed
1238

1239 1240 1241 1242 1243 1244 1245 1246 1247 1248 1249
/* Returns nonzero if T is a ->* or .* expression that refers to a
   member function.  */

int
bound_pmf_p (t)
     tree t;
{
  return (TREE_CODE (t) == OFFSET_REF
	  && TYPE_PTRMEMFUNC_P (TREE_TYPE (TREE_OPERAND (t, 1))));
}

/* Return a new OVL node, concatenating it with the old one. */

tree
ovl_cons (decl, chain)
     tree decl;
     tree chain;
{
  tree result = make_node (OVERLOAD);
  TREE_TYPE (result) = unknown_type_node;
  OVL_FUNCTION (result) = decl;
  TREE_CHAIN (result) = chain;
  
  return result;
}

/* Build a new overloaded function. If this is the first one,
   just return it; otherwise, ovl_cons the _DECLs.  */

tree
build_overload (decl, chain)
     tree decl;
     tree chain;
{
  if (! chain && TREE_CODE (decl) != TEMPLATE_DECL)
    return decl;
  if (chain && TREE_CODE (chain) != OVERLOAD)
    chain = ovl_cons (chain, NULL_TREE);
  return ovl_cons (decl, chain);
}
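
/* Illustrative sketch (not part of the original source): build_overload
   introduces an OVERLOAD node only once a second declaration (or a
   TEMPLATE_DECL) appears, so a lone FUNCTION_DECL comes back
   unwrapped.  FN1 and FN2 are hypothetical FUNCTION_DECLs.  */
#if 0
static tree
overload_chain_example (fn1, fn2)
     tree fn1, fn2;
{
  tree fns = build_overload (fn1, NULL_TREE);	/* just fn1 */
  fns = build_overload (fn2, fns);		/* OVERLOAD: fn2, then fn1 */
  return fns;
}
#endif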

/* True if fn is in ovl. */

int
ovl_member (fn, ovl)
     tree fn;
     tree ovl;
{
  if (ovl == NULL_TREE)
    return 0;
  if (TREE_CODE (ovl) != OVERLOAD)
    return ovl == fn;
  for (; ovl; ovl = OVL_CHAIN (ovl))
    if (OVL_FUNCTION (ovl) == fn)
      return 1;
  return 0;
}

int
is_aggr_type_2 (t1, t2)
     tree t1, t2;
{
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return 0;
  return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
}

/* Returns non-zero if CODE is the code for a statement.  */

static int
statement_code_p (code)
     enum tree_code code;
{
  switch (code)
    {
    case EXPR_STMT:
    case COMPOUND_STMT:
    case DECL_STMT:
    case IF_STMT:
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case RETURN_STMT:
    case BREAK_STMT:
    case CONTINUE_STMT:
    case SWITCH_STMT:
    case GOTO_STMT:
    case LABEL_STMT:
    case ASM_STMT:
    case SUBOBJECT:
    case CLEANUP_STMT:
    case START_CATCH_STMT:
    case CTOR_STMT:
    case SCOPE_STMT:
    case CTOR_INITIALIZER:
    case CASE_LABEL:
    case RETURN_INIT:
    case TRY_BLOCK:
    case HANDLER:
      return 1;

    default:
      return 0;
    }
}

#define PRINT_RING_SIZE 4

const char *
lang_printable_name (decl, v)
     tree decl;
     int v;
{
  static tree decl_ring[PRINT_RING_SIZE];
  static char *print_ring[PRINT_RING_SIZE];
  static int ring_counter;
  int i;

  /* Only cache functions.  */
  if (v < 2
      || TREE_CODE (decl) != FUNCTION_DECL
      || DECL_LANG_SPECIFIC (decl) == 0)
    return lang_decl_name (decl, v);

  /* See if this print name is lying around.  */
  for (i = 0; i < PRINT_RING_SIZE; i++)
    if (decl_ring[i] == decl)
      /* yes, so return it.  */
      return print_ring[i];

  if (++ring_counter == PRINT_RING_SIZE)
    ring_counter = 0;

  if (current_function_decl != NULL_TREE)
    {
      if (decl_ring[ring_counter] == current_function_decl)
	ring_counter += 1;
      if (ring_counter == PRINT_RING_SIZE)
	ring_counter = 0;
      if (decl_ring[ring_counter] == current_function_decl)
	my_friendly_abort (106);
    }

  if (print_ring[ring_counter])
    free (print_ring[ring_counter]);

  print_ring[ring_counter] = xstrdup (lang_decl_name (decl, v));
  decl_ring[ring_counter] = decl;
  return print_ring[ring_counter];
}

/* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
   listed in RAISES.  */

tree
build_exception_variant (type, raises)
     tree type;
     tree raises;
{
  tree v = TYPE_MAIN_VARIANT (type);
  int type_quals = TYPE_QUALS (type);

  for (; v; v = TYPE_NEXT_VARIANT (v))
    if (TYPE_QUALS (v) == type_quals
        && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (v), 1))
      return v;

  /* Need to build a new variant.  */
  v = build_type_copy (type);
  TYPE_RAISES_EXCEPTIONS (v) = raises;
  return v;
}
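
/* Illustrative sketch (not part of the original source): building the
   type of a function declared "void f () throw (int)" by attaching an
   exception specification to a plain function type.  The helper name
   is hypothetical, and the use of void_list_node is an assumption.  */
#if 0
static tree
throwing_fn_type_example ()
{
  tree fn_type = build_function_type (void_type_node, void_list_node);
  tree raises = build_tree_list (NULL_TREE, integer_type_node);
  return build_exception_variant (fn_type, raises);
}
#endif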

/* Given a TEMPLATE_TEMPLATE_PARM node T, create a new one together with its 
   lang_specific field and its corresponding TEMPLATE_DECL node */

tree
copy_template_template_parm (t)
     tree t;
{
  tree template = TYPE_NAME (t);
  tree t2;

  t2 = make_aggr_type (TEMPLATE_TEMPLATE_PARM);
  template = copy_node (template);
  copy_lang_decl (template);

  TREE_TYPE (template) = t2;
  TYPE_NAME (t2) = template;
  TYPE_STUB_DECL (t2) = template;

  /* No need to copy these */
  TYPE_FIELDS (t2) = TYPE_FIELDS (t);
  TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t2) 
    = TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t);
  return t2;
}

/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
   FUNC is called with the DATA and the address of each sub-tree.  If
   FUNC returns a non-NULL value, the traversal is aborted, and the
   value returned by FUNC is returned.  */

tree 
walk_tree (tp, func, data)
     tree *tp;
     walk_tree_fn func;
     void *data;
{
  enum tree_code code;
  int walk_subtrees;
  tree result;
  
#define WALK_SUBTREE(NODE)			\
  do						\
    {						\
      result = walk_tree (&(NODE), func, data);	\
      if (result)				\
	return result;				\
    }						\
  while (0)

  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    return NULL_TREE;

  code = TREE_CODE (*tp);

  /* Handle common cases up front.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || TREE_CODE_CLASS (code) == 'r'
      || TREE_CODE_CLASS (code) == 's')
    {
      int i, len;

      /* Walk over all the sub-trees of this operand.  */
      len = first_rtl_op (code);
      /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	 But, we only want to walk once.  */
      if (code == TARGET_EXPR
	  && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
	--len;
      /* Go through the subtrees.  We need to do this in forward order so
         that the scope of a FOR_EXPR is handled properly.  */
      for (i = 0; i < len; ++i)
	WALK_SUBTREE (TREE_OPERAND (*tp, i));

      /* For statements, we also walk the chain so that we cover the
	 entire statement tree.  */
      if (statement_code_p (code))
	{
	  if (code == DECL_STMT 
	      && DECL_STMT_DECL (*tp) 
	      && TREE_CODE_CLASS (TREE_CODE (DECL_STMT_DECL (*tp))) == 'd')
	    {
	      /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
		 into declarations that are just mentioned, rather than
		 declared; they don't really belong to this part of the tree.
		 And, we can see cycles: the initializer for a declaration can
		 refer to the declaration itself.  */
	      WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
	      WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
	    }

	  WALK_SUBTREE (TREE_CHAIN (*tp));
	}

      /* We didn't find what we were looking for.  */
      return NULL_TREE;
    }
  else if (TREE_CODE_CLASS (code) == 'd')
    {
      WALK_SUBTREE (TREE_TYPE (*tp));

      /* We didn't find what we were looking for.  */
      return NULL_TREE;
    }

  /* Not one of the easy cases.  We must explicitly go through the
     children.  */
  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
    case DEFAULT_ARG:
    case TEMPLATE_TEMPLATE_PARM:
    case TEMPLATE_PARM_INDEX:
    case TEMPLATE_TYPE_PARM:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VOID_TYPE:
    case BOOLEAN_TYPE:
    case TYPENAME_TYPE:
    case UNION_TYPE:
    case ENUMERAL_TYPE:
    case TYPEOF_TYPE:
    case BLOCK:
      /* None of these have subtrees other than those already walked
         above.  */
      break;

    case PTRMEM_CST:
      WALK_SUBTREE (TREE_TYPE (*tp));
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_PURPOSE (*tp));
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE (TREE_CHAIN (*tp));
      break;

    case OVERLOAD:
      WALK_SUBTREE (OVL_FUNCTION (*tp));
      WALK_SUBTREE (OVL_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);
	while (len--)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
      }
      break;

    case COMPLEX_CST:
1585 1586
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE (TREE_IMAGPART (*tp));
      break;

    case CONSTRUCTOR:
      WALK_SUBTREE (CONSTRUCTOR_ELTS (*tp));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (TYPE_ARG_TYPES (*tp));
      break;

    case ARRAY_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (TYPE_DOMAIN (*tp));
      break;

    case INTEGER_TYPE:
      WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
      WALK_SUBTREE (TYPE_MAX_VALUE (*tp));
      break;

    case OFFSET_TYPE:
1613 1614
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (*tp));
      break;

    case RECORD_TYPE:
1618 1619
      if (TYPE_PTRMEMFUNC_P (*tp))
	WALK_SUBTREE (TYPE_PTRMEMFUNC_FN_TYPE (*tp));
      break;

    default:
      my_friendly_abort (19990803);
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE
}
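
/* Illustrative sketch (not part of the original source): a minimal
   walk_tree_fn callback in the same style as no_linkage_helper below.
   It aborts the walk at the first CALL_EXPR found and returns it; the
   callback name is hypothetical.  */
#if 0
static tree
find_call_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    return *tp;
  return NULL_TREE;
}

/* Usage: tree call = walk_tree (&expr, find_call_r, NULL);  */
#endif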

/* Passed to walk_tree.  Checks for the use of types with no linkage.  */

static tree
no_linkage_helper (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  tree t = *tp;

  if (TYPE_P (t)
      && (IS_AGGR_TYPE (t) || TREE_CODE (t) == ENUMERAL_TYPE)
      && (decl_function_context (TYPE_MAIN_DECL (t))
	  || ANON_AGGRNAME_P (TYPE_IDENTIFIER (t))))
    return t;
  return NULL_TREE;
}

/* Check if the type T depends on a type with no linkage and if so, return
   it.  */

tree
no_linkage_check (t)
     tree t;
{
  /* There's no point in checking linkage on template functions; we
     can't know their complete types.  */
  if (processing_template_decl)
    return NULL_TREE;

  t = walk_tree (&t, no_linkage_helper, NULL);
  if (t != error_mark_node)
    return t;
  return NULL_TREE;
}
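
/* An illustrative (hypothetical) example, not taken from real code:

     void f ()
     {
       struct S { int i; };    // S has no linkage: local class
     }

   Walking `S' (or `S *') with no_linkage_helper returns the
   RECORD_TYPE for S, because its TYPE_MAIN_DECL has a function
   context; anonymous aggregates are caught through ANON_AGGRNAME_P in
   the same way.  */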

/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (*tp);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || TREE_CODE_CLASS (code) == 'r'
      || TREE_CODE_CLASS (code) == 'c'
      || TREE_CODE_CLASS (code) == 's'
      || code == PARM_DECL
      || code == TREE_LIST
      || code == TREE_VEC
      || code == OVERLOAD)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      *tp = copy_node (*tp);

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL || code == TREE_LIST || code == OVERLOAD
	  || statement_code_p (code))
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all scope-statements.  */
      if (TREE_CODE (*tp) == SCOPE_STMT)
	SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == TEMPLATE_TEMPLATE_PARM)
    /* These must be copied specially.  */
    *tp = copy_template_template_parm (*tp);
  else if (TREE_CODE_CLASS (code) == 't')
    /* There's no need to copy types, or anything beneath them.  */
    *walk_subtrees = 0;

  return NULL_TREE;
}

#ifdef GATHER_STATISTICS
extern int depth_reached;
#endif

void
print_lang_statistics ()
{
  print_search_statistics ();
  print_class_statistics ();
#ifdef GATHER_STATISTICS
  fprintf (stderr, "maximum template instantiation depth reached: %d\n",
	   depth_reached);
#endif
}

/* This is used by the `assert' macro.  It is provided in libgcc.a,
   which `cc' doesn't know how to link.  Note that the C++ front-end
   no longer actually uses the `assert' macro (instead, it calls
   my_friendly_assert).  But all of the back-end files still need this.  */

void
__eprintf (string, expression, line, filename)
     const char *string;
     const char *expression;
     unsigned line;
     const char *filename;
{
  fprintf (stderr, string, expression, line, filename);
  fflush (stderr);
  abort ();
}

/* Return, as an INTEGER_CST node, the number of elements for TYPE
   (which is an ARRAY_TYPE).  This counts only elements of the top
   array.  */

tree
array_type_nelts_top (type)
     tree type;
{
  return fold (build (PLUS_EXPR, sizetype,
		      array_type_nelts (type),
		      integer_one_node));
}

/* Return, as an INTEGER_CST node, the number of elements for TYPE
   (which is an ARRAY_TYPE).  This one is a recursive count of all
   ARRAY_TYPEs that are clumped together.  */

tree
array_type_nelts_total (type)
     tree type;
{
  tree sz = array_type_nelts_top (type);
  type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree n = array_type_nelts_top (type);
      sz = fold (build (MULT_EXPR, sizetype, sz, n));
      type = TREE_TYPE (type);
    }
  return sz;
}
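
/* For example (illustrative only): given `int a[2][3]',
   array_type_nelts_top on the array type yields the INTEGER_CST 2,
   the element count of the outermost array only, while
   array_type_nelts_total yields 6, the product over all nested
   ARRAY_TYPEs.  */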

/* Called from break_out_target_exprs via walk_tree.  */

static tree
bot_manip (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  splay_tree target_remap = ((splay_tree) data);
  tree t = *tp;

  if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
    {
      /* There can't be any TARGET_EXPRs below this point.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  else if (TREE_CODE (t) == TARGET_EXPR)
    {
      tree u;

      if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR)
	{
	  mark_used (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 1), 0), 0));
	  u = build_cplus_new
	    (TREE_TYPE (t), break_out_target_exprs (TREE_OPERAND (t, 1)));
	}
      else 
	{
	  u = copy_node (t);
	  TREE_OPERAND (u, 0) = build (VAR_DECL, TREE_TYPE (t));
	  layout_decl (TREE_OPERAND (u, 0), 0);
	}

      /* Map the old variable to the new one.  */
      splay_tree_insert (target_remap, 
			 (splay_tree_key) TREE_OPERAND (t, 0), 
			 (splay_tree_value) TREE_OPERAND (u, 0));

      /* Replace the old expression with the new version.  */
      *tp = u;
      /* We don't have to go below this point; the recursive call to
	 break_out_target_exprs will have handled anything below this
	 point.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  else if (TREE_CODE (t) == CALL_EXPR)
    mark_used (TREE_OPERAND (TREE_OPERAND (t, 0), 0));

  /* Make a copy of this node.  */
  return copy_tree_r (tp, walk_subtrees, NULL);
}
  
/* Replace all remapped VAR_DECLs in T with their new equivalents.
   DATA is really a splay-tree mapping old variables to new
   variables.  */

static tree
bot_replace (t, walk_subtrees, data)
     tree *t;
     int *walk_subtrees ATTRIBUTE_UNUSED;
     void *data;
{
  splay_tree target_remap = ((splay_tree) data);

  if (TREE_CODE (*t) == VAR_DECL)
    {
      splay_tree_node n = splay_tree_lookup (target_remap,
					     (splay_tree_key) *t);
      if (n)
	*t = (tree) n->value;
    }

  return NULL_TREE;
}
	
/* When we parse a default argument expression, we may create
   temporary variables via TARGET_EXPRs.  When we actually use the
   default-argument expression, we make a copy of the expression, but
   we must replace the temporaries with appropriate local versions.  */
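
/* Sketch of the idea (the declarations here are hypothetical):

     struct S { S (); };
     void f (S s = S ());

   The saved default argument for `f' contains a TARGET_EXPR whose
   temporary VAR_DECL must not be shared between calls.  bot_manip
   copies each TARGET_EXPR, recording the old->new variable mapping in
   the splay tree, and bot_replace then rewrites any remaining
   references to the old temporaries.  */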

tree
break_out_target_exprs (t)
     tree t;
{
  static int target_remap_count;
  static splay_tree target_remap;

  if (!target_remap_count++)
    target_remap = splay_tree_new (splay_tree_compare_pointers, 
				   /*splay_tree_delete_key_fn=*/NULL, 
				   /*splay_tree_delete_value_fn=*/NULL);
  walk_tree (&t, bot_manip, target_remap);
  walk_tree (&t, bot_replace, target_remap);

  if (!--target_remap_count)
    {
      splay_tree_delete (target_remap);
      target_remap = NULL;
    }

  return t;
}

/* Obstack used for allocating nodes in template function and variable
   definitions.  */

/* Similar to `build_nt', except that we set TREE_COMPLEXITY to be the
   current line number.  */

tree
build_min_nt VPROTO((enum tree_code code, ...))
{
#ifndef ANSI_PROTOTYPES
  enum tree_code code;
#endif
  va_list p;
  register tree t;
  register int length;
  register int i;

  VA_START (p, code);

#ifndef ANSI_PROTOTYPES
  code = va_arg (p, enum tree_code);
#endif

  t = make_node (code);
  length = tree_code_length[(int) code];
  TREE_COMPLEXITY (t) = lineno;

  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
    }

  va_end (p);
  return t;
}

/* Similar to `build', except we set TREE_COMPLEXITY to the current
   line-number.  */

tree
build_min VPROTO((enum tree_code code, tree tt, ...))
{
#ifndef ANSI_PROTOTYPES
  enum tree_code code;
  tree tt;
#endif
  va_list p;
  register tree t;
  register int length;
  register int i;

  VA_START (p, tt);

#ifndef ANSI_PROTOTYPES
  code = va_arg (p, enum tree_code);
  tt = va_arg (p, tree);
#endif

  t = make_node (code);
  length = tree_code_length[(int) code];
  TREE_TYPE (t) = tt;
  TREE_COMPLEXITY (t) = lineno;

  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
    }

  va_end (p);
  return t;
}

tree
get_type_decl (t)
     tree t;
{
  if (TREE_CODE (t) == TYPE_DECL)
    return t;
  if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
    return TYPE_STUB_DECL (t);
  
  my_friendly_abort (42);

  /* Stop compiler from complaining control reaches end of non-void function.  */
  return 0;
}

int
can_free (obstack, t)
     struct obstack *obstack;
     tree t;
{
  int size = 0;

  if (TREE_CODE (t) == TREE_VEC)
    size = (TREE_VEC_LENGTH (t)-1) * sizeof (tree) + sizeof (struct tree_vec);
  else
    my_friendly_abort (42);

#define ROUND(x) ((x + obstack_alignment_mask (obstack)) \
		  & ~ obstack_alignment_mask (obstack))
  if ((char *)t + ROUND (size) == obstack_next_free (obstack))
    return 1;
#undef ROUND

  return 0;
}

/* Return first vector element whose BINFO_TYPE is ELEM.
   Return 0 if ELEM is not in VEC.  VEC may be NULL_TREE.  */

tree
vec_binfo_member (elem, vec)
     tree elem, vec;
{
  int i;

  if (vec)
    for (i = 0; i < TREE_VEC_LENGTH (vec); ++i)
      if (same_type_p (elem, BINFO_TYPE (TREE_VEC_ELT (vec, i))))
	return TREE_VEC_ELT (vec, i);

  return NULL_TREE;
}

/* Kludge around the fact that DECL_CONTEXT for virtual functions returns
   the wrong thing for decl_function_context.  Hopefully the uses in the
   backend won't matter, since we don't need a static chain for local class
   methods.  FIXME!  */

tree
hack_decl_function_context (decl)
     tree decl;
{
  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_FUNCTION_MEMBER_P (decl))
    return decl_function_context (TYPE_MAIN_DECL (DECL_CLASS_CONTEXT (decl)));
  return decl_function_context (decl);
}

/* Returns the namespace that contains DECL, whether directly or
   indirectly.  */

tree
decl_namespace_context (decl)
     tree decl;
{
  while (1)
    {
      if (TREE_CODE (decl) == NAMESPACE_DECL)
	return decl;
      else if (TYPE_P (decl))
	decl = CP_DECL_CONTEXT (TYPE_MAIN_DECL (decl));
      else
	decl = CP_DECL_CONTEXT (decl);
    }
}

/* Return truthvalue of whether T1 is the same tree structure as T2.
   Return 1 if they are the same.
   Return 0 if they are understandably different.
   Return -1 if either contains tree structure not understood by
   this function.  */

int
cp_tree_equal (t1, t2)
     tree t1, t2;
{
  register enum tree_code code1, code2;
  int cmp;

  if (t1 == t2)
    return 1;
  if (t1 == 0 || t2 == 0)
    return 0;

  code1 = TREE_CODE (t1);
  code2 = TREE_CODE (t2);

  if (code1 == NOP_EXPR || code1 == CONVERT_EXPR || code1 == NON_LVALUE_EXPR)
    {
      if (code2 == NOP_EXPR || code2 == CONVERT_EXPR || code2 == NON_LVALUE_EXPR)
	return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      else
	return cp_tree_equal (TREE_OPERAND (t1, 0), t2);
    }
  else if (code2 == NOP_EXPR || code2 == CONVERT_EXPR
	   || code2 == NON_LVALUE_EXPR)
    return cp_tree_equal (t1, TREE_OPERAND (t2, 0));

  if (code1 != code2)
    return 0;

  switch (code1)
    {
    case INTEGER_CST:
      return TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
	&& TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2);

    case REAL_CST:
      return REAL_VALUES_EQUAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));

    case STRING_CST:
      return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	&& !bcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
		  TREE_STRING_LENGTH (t1));

    case CONSTRUCTOR:
      /* We need to do this when determining whether or not two
	 non-type pointer to member function template arguments
	 are the same.  */
      if (!(same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))
	    /* The first operand is RTL.  */
	    && TREE_OPERAND (t1, 0) == TREE_OPERAND (t2, 0)))
	return 0;
      return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));

    case TREE_LIST:
      cmp = cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
      if (cmp <= 0)
	return cmp;
      cmp = cp_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2));
      if (cmp <= 0)
	return cmp;
      return cp_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2));

    case SAVE_EXPR:
      return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    case CALL_EXPR:
      cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      if (cmp <= 0)
	return cmp;
      return simple_cst_list_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));

    case TARGET_EXPR:
      /* Special case: if either target is an unallocated VAR_DECL,
	 it means that it's going to be unified with whatever the
	 TARGET_EXPR is really supposed to initialize, so treat it
	 as being equivalent to anything.  */
      if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
	   && DECL_RTL (TREE_OPERAND (t1, 0)) == 0)
	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
	      && DECL_RTL (TREE_OPERAND (t2, 0)) == 0))
	cmp = 1;
      else
	cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      if (cmp <= 0)
	return cmp;
      return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));

    case WITH_CLEANUP_EXPR:
      cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      if (cmp <= 0)
	return cmp;
      return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t2, 2));

    case COMPONENT_REF:
      if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
	return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      return 0;

    case VAR_DECL:
    case PARM_DECL:
    case CONST_DECL:
    case FUNCTION_DECL:
      return 0;

    case TEMPLATE_PARM_INDEX:
      return TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2)
	&& TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2);

    case SIZEOF_EXPR:
    case ALIGNOF_EXPR:
      if (TREE_CODE (TREE_OPERAND (t1, 0)) != TREE_CODE (TREE_OPERAND (t2, 0)))
	return 0;
      if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t1, 0))) == 't')
	return same_type_p (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      break;

    case PTRMEM_CST:
      /* Two pointer-to-members are the same if they point to the same
	 field or function in the same class.  */
      return (PTRMEM_CST_MEMBER (t1) == PTRMEM_CST_MEMBER (t2)
	      && same_type_p (PTRMEM_CST_CLASS (t1), PTRMEM_CST_CLASS (t2)));

    default:
      break;
    }

  switch (TREE_CODE_CLASS (code1))
    {
      int i;
    case '1':
    case '2':
    case '<':
    case 'e':
    case 'r':
    case 's':
      cmp = 1;
      for (i=0; i<tree_code_length[(int) code1]; ++i)
	{
	  cmp = cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
	  if (cmp <= 0)
	    return cmp;
	}
      return cmp;
    }

  return -1;
}
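
/* Illustrative summary (not from the original sources): two
   INTEGER_CSTs with the same value compare as 1; `1 + x' against
   `2 + x' compares as 0; and trees whose codes are not handled above,
   so that we cannot tell, compare as -1.  */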

/* Build a wrapper around some pointer PTR so we can use it as a tree.  */

tree
build_ptr_wrapper (ptr)
     void *ptr;
{
  tree t = make_node (WRAPPER);
  WRAPPER_PTR (t) = ptr;
  return t;
}

/* Same, but on the expression_obstack.  */

tree
build_expr_ptr_wrapper (ptr)
     void *ptr;
{
  return build_ptr_wrapper (ptr);
}

/* Build a wrapper around some integer I so we can use it as a tree.  */

tree
build_int_wrapper (i)
     int i;
{
  tree t = make_node (WRAPPER);
  WRAPPER_INT (t) = i;
  return t;
}

static tree
build_srcloc (file, line)
     char *file;
     int line;
{
  tree t;

  t = make_node (SRCLOC);
  SRCLOC_FILE (t) = file;
  SRCLOC_LINE (t) = line;

  return t;
}

tree
build_srcloc_here ()
{
  return build_srcloc (input_filename, lineno);
}

/* The type of ARG when used as an lvalue.  */

tree
lvalue_type (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  if (TREE_CODE (arg) == OVERLOAD)
    type = unknown_type_node;
  return type;
}

/* The type of ARG for printing error messages; denote lvalues with
   reference types.  */

tree
error_type (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  if (TREE_CODE (type) == ARRAY_TYPE)
    ;
  else if (real_lvalue_p (arg))
    type = build_reference_type (lvalue_type (arg));
  else if (IS_AGGR_TYPE (type))
    type = lvalue_type (arg);

  return type;
}

/* Does FUNCTION use a variable-length argument list?  */

int
varargs_function_p (function)
     tree function;
{
  tree parm = TYPE_ARG_TYPES (TREE_TYPE (function));
  for (; parm; parm = TREE_CHAIN (parm))
    if (TREE_VALUE (parm) == void_type_node)
      return 0;
  return 1;
}
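
/* Illustrative note: a prototype such as `int error (const char *, ...)'
   has no terminating void_list_node in its TYPE_ARG_TYPES, so this
   returns 1, whereas `int f (int)' ends with the void terminator and
   yields 0.  */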

/* Returns 1 if decl is a member of a class.  */

int
member_p (decl)
     tree decl;
{
  tree ctx = DECL_CONTEXT (decl);
  return (ctx && TREE_CODE_CLASS (TREE_CODE (ctx)) == 't');
}

/* Create a placeholder for member access where we don't actually have an
   object that the access is against.  */

tree
build_dummy_object (type)
     tree type;
{
  tree decl = build1 (NOP_EXPR, build_pointer_type (type), void_zero_node);
  return build_indirect_ref (decl, NULL_PTR);
}

/* We've gotten a reference to a member of TYPE.  Return *this if appropriate,
   or a dummy object otherwise.  If BINFOP is non-0, it is filled with the
   binfo path from current_class_type to TYPE, or 0.  */

tree
maybe_dummy_object (type, binfop)
     tree type;
     tree *binfop;
{
  tree decl, context;

  if (current_class_type
      && get_base_distance (type, current_class_type, 0, binfop) != -1)
    context = current_class_type;
  else
    {
      /* Reference from a nested class member function.  */
      context = type;
      if (binfop)
	*binfop = TYPE_BINFO (type);
    }

  if (current_class_ref && context == current_class_type)
    decl = current_class_ref;
  else
    decl = build_dummy_object (context);

  return decl;
}

/* Returns 1 if OB is a placeholder object, or a pointer to one.  */

int
is_dummy_object (ob)
     tree ob;
{
  if (TREE_CODE (ob) == INDIRECT_REF)
    ob = TREE_OPERAND (ob, 0);
  return (TREE_CODE (ob) == NOP_EXPR
	  && TREE_OPERAND (ob, 0) == void_zero_node);
}

/* Returns 1 iff type T is a POD type, as defined in [basic.types].  */

int
pod_type_p (t)
     tree t;
{
  while (TREE_CODE (t) == ARRAY_TYPE)
    t = TREE_TYPE (t);

  if (INTEGRAL_TYPE_P (t))
    return 1;  /* integral, character or enumeral type */
  if (FLOAT_TYPE_P (t))
    return 1;
  if (TYPE_PTR_P (t))
    return 1; /* pointer to non-member */
  if (TYPE_PTRMEM_P (t))
    return 1; /* pointer to member object */
  if (TYPE_PTRMEMFUNC_P (t))
    return 1; /* pointer to member function */
  
  if (! CLASS_TYPE_P (t))
    return 0; /* other non-class type (reference or function) */
  if (CLASSTYPE_NON_POD_P (t))
    return 0;
  return 1;
}
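
/* Rough illustration (hypothetical types): `int', `double *', and
   `struct P { int x, y; };' are POD here, while a class with, say, a
   user-declared constructor or destructor, virtual functions, or a
   reference member is marked by CLASSTYPE_NON_POD_P and is not.  */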

/* Return 1 if ATTR_NAME and ATTR_ARGS denote a valid C++-specific
   attribute for either declaration DECL or type TYPE and 0 otherwise.
   Plugged into valid_lang_attribute.  */

int
cp_valid_lang_attribute (attr_name, attr_args, decl, type)
  tree attr_name;
  tree attr_args ATTRIBUTE_UNUSED;
  tree decl ATTRIBUTE_UNUSED;
  tree type ATTRIBUTE_UNUSED;
{
  if (is_attribute_p ("com_interface", attr_name))
    {
      if (! flag_vtable_thunks)
	{
	  error ("`com_interface' only supported with -fvtable-thunks");
	  return 0;
	}

      if (attr_args != NULL_TREE
	  || decl != NULL_TREE
	  || ! CLASS_TYPE_P (type)
	  || type != TYPE_MAIN_VARIANT (type))
	{
	  warning ("`com_interface' attribute can only be applied to class definitions");
	  return 0;
	}

      CLASSTYPE_COM_INTERFACE (type) = 1;
      return 1;
    }
  else if (is_attribute_p ("init_priority", attr_name))
    {
      tree initp_expr = (attr_args ? TREE_VALUE (attr_args): NULL_TREE);
      int pri;

      if (initp_expr)
	STRIP_NOPS (initp_expr);
	  
      if (!initp_expr || TREE_CODE (initp_expr) != INTEGER_CST)
	{
	  error ("requested init_priority is not an integer constant");
	  return 0;
	}

      pri = TREE_INT_CST_LOW (initp_expr);
	
      while (TREE_CODE (type) == ARRAY_TYPE)
	type = TREE_TYPE (type);

      if (decl == NULL_TREE
	  || TREE_CODE (decl) != VAR_DECL
	  || ! TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || (TREE_CODE (type) != RECORD_TYPE
	      && TREE_CODE (type) != UNION_TYPE)
	  /* Static objects in functions are initialized the
	     first time control passes through that
	     function. This is not precise enough to pin down an
	     init_priority value, so don't allow it. */
	  || current_function_decl) 
	{
	  error ("can only use init_priority attribute on file-scope definitions of objects of class type");
	  return 0;
	}

      if (pri > MAX_INIT_PRIORITY || pri <= 0)
	{
	  error ("requested init_priority is out of range");
	  return 0;
	}

      /* Check for init_priorities that are reserved for
	 language and runtime support implementations.  */
      if (pri <= MAX_RESERVED_INIT_PRIORITY)
	{
	  warning 
	    ("requested init_priority is reserved for internal use");
	}

      DECL_INIT_PRIORITY (decl) = pri;
      return 1;
    }

  return 0;
}
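
/* Usage sketch (hypothetical user code, not part of the compiler):

     struct Logger { Logger (); };
     Logger log __attribute__ ((init_priority (2000)));

   Only file-scope objects of class type may carry init_priority; the
   value must lie in (0, MAX_INIT_PRIORITY], and values up to
   MAX_RESERVED_INIT_PRIORITY draw a warning because they are reserved
   for the implementation (the example assumes 2000 is above that
   reserved range).  */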

/* Return a new PTRMEM_CST of the indicated TYPE.  The MEMBER is the
   thing pointed to by the constant.  */

tree
make_ptrmem_cst (type, member)
     tree type;
     tree member;
{
  tree ptrmem_cst = make_node (PTRMEM_CST);
  /* It would seem a great convenience if make_node would set
     TREE_CONSTANT for things of class `c', but it does not.  */
  TREE_CONSTANT (ptrmem_cst) = 1;
  TREE_TYPE (ptrmem_cst) = type;
  PTRMEM_CST_MEMBER (ptrmem_cst) = member;
  return ptrmem_cst;
}

/* Mark ARG (which is really a list_hash_table **) for GC.  */

static void
mark_list_hash (arg)
     void *arg;
{
  struct list_hash *lh;

  for (lh = * ((struct list_hash **) arg); lh; lh = lh->next)
    ggc_mark_tree (lh->list);
}

/* Initialize tree.c.  */

void
init_tree ()
{
  make_lang_type_fn = cp_make_lang_type;
  lang_unsave = cp_unsave;
  ggc_add_root (list_hash_table, 
		sizeof (list_hash_table) / sizeof (struct list_hash *),
		sizeof (struct list_hash *),
		mark_list_hash);
}

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be
   mapped, use that one.  Otherwise, create a new node and enter it in
   ST.  FN is the function into which the copy will be placed.  */
Gavin Romig-Koch committed

void
remap_save_expr (tp, st, fn, walk_subtrees)
     tree *tp;
     splay_tree st;
     tree fn;
     int *walk_subtrees;
{
  splay_tree_node n;

  /* See if we already encountered this SAVE_EXPR.  */
  n = splay_tree_lookup (st, (splay_tree_key) *tp);
      
  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      tree t = copy_node (*tp);

      /* The SAVE_EXPR is now part of the function into which we
	 are inlining this body.  */
      SAVE_EXPR_CONTEXT (t) = fn;
      /* And we haven't evaluated it yet.  */
      SAVE_EXPR_RTL (t) = NULL_RTX;
      /* Remember this SAVE_EXPR.  */
      n = splay_tree_insert (st,
			     (splay_tree_key) *tp,
			     (splay_tree_value) t);
    }
  else
    /* We've already walked into this SAVE_EXPR, so we needn't do it
       again.  */
    *walk_subtrees = 0;

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = (tree) n->value;
}
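
/* Note (explanatory, not from the original sources): because every
   occurrence of a given SAVE_EXPR must denote a single evaluation,
   all copies made while inlining map to the one node recorded in ST;
   the first visit creates the copy and later visits simply reuse
   it.  */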

/* Called via walk_tree.  If *TP points to a DECL_STMT for a local
   declaration, copies the declaration and enters it in the splay_tree
   pointed to by DATA (which is really a `splay_tree *').  */

static tree
mark_local_for_remap_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees ATTRIBUTE_UNUSED;
     void *data;
{
  tree t = *tp;
  splay_tree st = (splay_tree) data;

  if ((TREE_CODE (t) == DECL_STMT
       && nonstatic_local_decl_p (DECL_STMT_DECL (t)))
      || TREE_CODE (t) == LABEL_STMT)
    {
      tree decl;
      tree copy;

      /* Figure out what's being declared.  */
      decl = (TREE_CODE (t) == DECL_STMT
	      ? DECL_STMT_DECL (t) : LABEL_STMT_LABEL (t));
      
      /* Make a copy.  */
      copy = copy_decl_for_inlining (decl, 
				     DECL_CONTEXT (decl), 
				     DECL_CONTEXT (decl));

      /* Remember the copy.  */
      splay_tree_insert (st,
			 (splay_tree_key) decl, 
			 (splay_tree_value) copy);
    }

  return NULL_TREE;
}

/* Called via walk_tree when an expression is unsaved.  Using the
   splay_tree pointed to by ST (which is really a `splay_tree *'),
   remaps all local declarations to appropriate replacements.  */

static tree
cp_unsave_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  splay_tree st = (splay_tree) data;
  splay_tree_node n;

  /* Only a local declaration (variable or label).  */
  if (nonstatic_local_decl_p (*tp))
    {
      /* Lookup the declaration.  */
      n = splay_tree_lookup (st, (splay_tree_key) *tp);
      
      /* If it's there, remap it.  */
      if (n)
	*tp = (tree) n->value;
    }
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, current_function_decl, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Called by unsave_expr_now whenever an expression (*TP) needs to be
   unsaved.  */
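
/* Why two walks (explanatory note): a use of a local variable or
   label can be encountered before the DECL_STMT or LABEL_STMT that
   declares it, so mark_local_for_remap_r first records a replacement
   for every local declaration, and cp_unsave_r can then remap uses in
   whatever order the copying walk reaches them.  */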

static void
cp_unsave (tp)
     tree *tp;
{
  splay_tree st;

  /* Create a splay-tree to map old local variable declarations to new
     ones.  */
  st = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  /* Walk the tree once figuring out what needs to be remapped.  */
  walk_tree (tp, mark_local_for_remap_r, st);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (tp, cp_unsave_r, st);

  /* Clean up.  */
  splay_tree_delete (st);
}