/* Process expressions for the GNU compiler for the Java(TM) language.
   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001
   Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  

Java and all Java-based marks are trademarks or registered trademarks
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc.  */

/* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */

#include "config.h"
#include "system.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "flags.h"
#include "expr.h"
#include "java-tree.h"
#include "javaop.h"
#include "java-opcodes.h"
#include "jcf.h"
#include "java-except.h"
#include "parse.h"
#include "toplev.h"
#include "except.h"
#include "ggc.h"

static void flush_quick_stack PARAMS ((void));
static void push_value PARAMS ((tree));
static tree pop_value PARAMS ((tree));
static void java_stack_swap PARAMS ((void));
static void java_stack_dup PARAMS ((int, int));
static void build_java_athrow PARAMS ((tree));
static void build_java_jsr PARAMS ((int, int));
static void build_java_ret PARAMS ((tree));
static void expand_java_multianewarray PARAMS ((tree, int));
static void expand_java_arraystore PARAMS ((tree));
static void expand_java_arrayload PARAMS ((tree));
static void expand_java_array_length PARAMS ((void));
static tree build_java_monitor PARAMS ((tree, tree));
static void expand_java_pushc PARAMS ((int, tree));
static void expand_java_return PARAMS ((tree));
static void expand_load_internal PARAMS ((int, tree, int));
static void expand_java_NEW PARAMS ((tree));
static void expand_java_INSTANCEOF PARAMS ((tree));
static void expand_java_CHECKCAST PARAMS ((tree));
static void expand_iinc PARAMS ((unsigned int, int, int));
static void expand_java_binop PARAMS ((tree, enum tree_code));
static void note_label PARAMS ((int, int));
static void expand_compare PARAMS ((enum tree_code, tree, tree, int));
static void expand_test PARAMS ((enum tree_code, tree, int));
static void expand_cond PARAMS ((enum tree_code, tree, int));
static void expand_java_goto PARAMS ((int));
#if 0
static void expand_java_call PARAMS ((int, int));
static void expand_java_ret PARAMS ((tree)); 
#endif
static tree pop_arguments PARAMS ((tree)); 
static void expand_invoke PARAMS ((int, int, int)); 
static void expand_java_field_op PARAMS ((int, int, int)); 
static void java_push_constant_from_pool PARAMS ((struct JCF *, int)); 
static void java_stack_pop PARAMS ((int)); 
static tree build_java_throw_out_of_bounds_exception PARAMS ((tree)); 
static tree build_java_check_indexed_type PARAMS ((tree, tree)); 
static tree case_identity PARAMS ((tree, tree)); 
static unsigned char peek_opcode_at_pc PARAMS ((struct JCF *, int, int));
static int emit_init_test_initialization PARAMS ((void **entry,
						  void * ptr));
static int get_offset_table_index PARAMS ((tree));

static GTY(()) tree operand_type[59];

static GTY(()) tree methods_ident;
static GTY(()) tree ncode_ident;
tree dtable_ident = NULL_TREE;

/* Set to nonzero value in order to emit class initialization code
   before static field references.  */
int always_initialize_class_p;

/* We store the stack state in two places:
   Within a basic block, we use the quick_stack, which is a
   pushdown list (TREE_LISTs) of expression nodes.
   This is the top part of the stack;  below that we use find_stack_slot.
   At the end of a basic block, the quick_stack must be flushed
   to the stack slot array (as handled by find_stack_slot).
   Using quick_stack generates better code (especially when
   compiled without optimization), because we do not have to
   explicitly store and load trees to temporary variables.

   If a variable is on the quick stack, it means the value of the variable
   when the quick stack was last flushed.  Conceptually, flush_quick_stack
   saves all the quick_stack elements in parallel.  However, that is
   complicated, so it actually saves them (i.e. copies each stack value
   to its home virtual register) from low indexes upward.  This allows a
   quick_stack element at index i (counting from the bottom of the stack)
   to reference slot virtual registers with index >= i, but not those
   that are deeper.
   This convention makes most operations easier.  For example iadd works
   even when the stack contains (reg[0], reg[1]):  It results in the
   stack containing (reg[0]+reg[1]), which is OK.  However, some stack
   operations are more complicated.  For example dup given a stack
   containing (reg[0]) would yield (reg[0], reg[0]), which would violate
   the convention, since stack value 1 would refer to a register with
   lower index (reg[0]), which flush_quick_stack does not safely handle.
   So dup cannot just add an extra element to the quick_stack, but iadd can.
*/

static GTY(()) tree quick_stack;
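/* An illustrative sketch, not compiled in: how the flush-order invariant
   described above plays out.  The helpers named here are the real ones
   from this file; the scenario itself is hypothetical.  */
#if 0
static void
quick_stack_invariant_example ()
{
  /* iadd with a quick_stack of (reg[0], reg[1]): pop both, push the sum.
     The single result at index 0 only mentions slots >= 0, so the
     invariant still holds and no flush is needed.  */
  tree top = pop_value (int_type_node);    /* reg[1] */
  tree below = pop_value (int_type_node);  /* reg[0] */
  push_value (fold (build (PLUS_EXPR, int_type_node, below, top)));

  /* dup with a quick_stack of (reg[0]) would leave (reg[0], reg[0]);
     the copy at index 1 would refer to slot 0 < 1, which
     flush_quick_stack cannot save safely, so java_stack_dup calls
     flush_quick_stack first instead of growing the quick_stack.  */
}
#endif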

/* A free-list of unused permanent TREE_LIST nodes. */
static GTY((deletable (""))) tree tree_list_free_list;

/* The stack pointer of the Java virtual machine.
   This does include the size of the quick_stack. */

int stack_pointer;

const unsigned char *linenumber_table;
int linenumber_count;

void
init_expr_processing()
{
  operand_type[21] = operand_type[54] = int_type_node;
  operand_type[22] = operand_type[55] = long_type_node;
  operand_type[23] = operand_type[56] = float_type_node;
  operand_type[24] = operand_type[57] = double_type_node;
  operand_type[25] = operand_type[58] = ptr_type_node;
}

tree
java_truthvalue_conversion (expr)
     tree expr;
{
  /* It is simpler and generates better code to have only TRUTH_*_EXPR
     or comparison expressions as truth values at this level.

     This function should normally be identity for Java.  */

  switch (TREE_CODE (expr))
    {
    case EQ_EXPR:
    case NE_EXPR: case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case ERROR_MARK:
      return expr;

    case INTEGER_CST:
      return integer_zerop (expr) ? boolean_false_node : boolean_true_node;

    case REAL_CST:
      return real_zerop (expr) ? boolean_false_node : boolean_true_node;

    /* are these legal? XXX JH */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case FLOAT_EXPR:
    case FFS_EXPR:
      /* These don't change whether an object is nonzero or zero.  */
      return java_truthvalue_conversion (TREE_OPERAND (expr, 0));

    case COND_EXPR:
      /* Distribute the conversion into the arms of a COND_EXPR.  */
      return fold (build (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
                          java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
                          java_truthvalue_conversion (TREE_OPERAND (expr, 2))));

    case NOP_EXPR:
      /* If this is widening the argument, we can ignore it.  */
      if (TYPE_PRECISION (TREE_TYPE (expr))
          >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
        return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
      /* fall through to default */

    default:
      return fold (build (NE_EXPR, boolean_type_node, expr, boolean_false_node));
    }
}

/* Save any stack slots that happen to be in the quick_stack into their
   home virtual register slots.

   The copy order is from low stack index to high, to support the invariant
   that the expression for a slot may contain decls for stack slots with
   higher (or the same) index, but not lower. */

static void
flush_quick_stack ()
{
  int stack_index = stack_pointer;
  register tree prev, cur, next;

  /* First reverse the quick_stack, and count the number of slots it has. */
  for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
    {
      next = TREE_CHAIN (cur);
      TREE_CHAIN (cur) = prev;
      prev = cur;
      stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
    }
  quick_stack = prev;

  while (quick_stack != NULL_TREE)
    {
      tree decl;
      tree node = quick_stack, type;
      quick_stack = TREE_CHAIN (node);
      TREE_CHAIN (node) = tree_list_free_list;
      tree_list_free_list = node;
      node = TREE_VALUE (node);
      type = TREE_TYPE (node);

      decl = find_stack_slot (stack_index, type);
      if (decl != node)
	  expand_assignment (decl, node, 0, 0);
      stack_index += 1 + TYPE_IS_WIDE (type);
    }
}

/* Push TYPE on the type stack.
   Return 1 on success, 0 on overflow. */

int
push_type_0 (type)
     tree type;
{
  int n_words;
  type = promote_type (type);
  n_words = 1 + TYPE_IS_WIDE (type);
  if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
    return 0;
  stack_type_map[stack_pointer++] = type;
  n_words--;
  while (--n_words >= 0)
    stack_type_map[stack_pointer++] = TYPE_SECOND;
  return 1;
}

void
push_type (type)
     tree type;
{
  if (! push_type_0 (type))
    abort ();
}

static void
push_value (value)
     tree value;
{
  tree type = TREE_TYPE (value);
  if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
    {
      type = promote_type (type);
      value = convert (type, value);
    }
  push_type (type);
  if (tree_list_free_list == NULL_TREE)
    quick_stack = tree_cons (NULL_TREE, value, quick_stack);
  else
    {
      tree node = tree_list_free_list;
      tree_list_free_list = TREE_CHAIN (tree_list_free_list);
      TREE_VALUE (node) = value;
      TREE_CHAIN (node) = quick_stack;
      quick_stack = node;
    }
}

/* Pop a type from the type stack.
   TYPE is the expected type.   Return the actual type, which must be
   convertible to TYPE.
   On an error, *MESSAGEP is set to a freshly malloc'd error message. */

tree
pop_type_0 (type, messagep)
     tree type;
     char **messagep;
{
  int n_words;
  tree t;
  *messagep = NULL;
  if (TREE_CODE (type) == RECORD_TYPE)
    type = promote_type (type);
  n_words = 1 + TYPE_IS_WIDE (type);
  if (stack_pointer < n_words)
    {
      *messagep = xstrdup ("stack underflow");
      return type;
    }
  while (--n_words > 0)
    {
      if (stack_type_map[--stack_pointer] != void_type_node)
	{
	  *messagep = xstrdup ("Invalid multi-word value on type stack");
	  return type;
	}
    }
  t = stack_type_map[--stack_pointer];
  if (type == NULL_TREE || t == type)
    return t;
  if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
      && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
      return t;
  if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
    {
      if (type == ptr_type_node || type == object_ptr_type_node)
	return t;
      else if (t == ptr_type_node)  /* Special case for null reference. */
	return type;
      else if (can_widen_reference_to (t, type))
	return t;
      /* This is a kludge, but matches what Sun's verifier does.
	 It can be tricked, but is safe as long as type errors
	 (i.e. interface method calls) are caught at run-time. */
      else if (CLASS_INTERFACE (TYPE_NAME (TREE_TYPE (type))))
	return object_ptr_type_node;
    }

  /* lang_printable_name uses a static buffer, so we must save the result
     from calling it the first time.  */
  {
    char *temp = xstrdup (lang_printable_name (type, 0));
    *messagep = concat ("expected type '", temp,
			"' but stack contains '", lang_printable_name (t, 0),
			"'", NULL);
    free (temp);
  }
  return type;
}

/* Pop a type from the type stack.
   TYPE is the expected type.  Return the actual type, which must be
   convertible to TYPE, otherwise call error. */

tree
pop_type (type)
     tree type;
{
  char *message = NULL;
  type = pop_type_0 (type, &message);
  if (message != NULL)
    {
      error ("%s", message);
      free (message);
    }
  return type;
}

/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
   Handles array types and interfaces.  */

int
can_widen_reference_to (source_type, target_type)
     tree source_type, target_type;
{
  if (source_type == ptr_type_node || target_type == object_ptr_type_node)
    return 1;

  /* Get rid of pointers  */
  if (TREE_CODE (source_type) == POINTER_TYPE)
    source_type = TREE_TYPE (source_type);
  if (TREE_CODE (target_type) == POINTER_TYPE)
    target_type = TREE_TYPE (target_type);

  if (source_type == target_type)
    return 1;
  else
    {
      if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
	{
	  HOST_WIDE_INT source_length, target_length;
	  if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
	    return 0;
	  target_length = java_array_type_length (target_type);
	  if (target_length >= 0)
	    {
	      source_length = java_array_type_length (source_type);
	      if (source_length != target_length)
		return 0;
	    }
	  source_type = TYPE_ARRAY_ELEMENT (source_type);
	  target_type = TYPE_ARRAY_ELEMENT (target_type);
	  if (source_type == target_type)
	    return 1;
	  if (TREE_CODE (source_type) != POINTER_TYPE
	      || TREE_CODE (target_type) != POINTER_TYPE)
	    return 0;
	  return can_widen_reference_to (source_type, target_type);
	}
      else
	{
	  int source_depth = class_depth (source_type);
	  int target_depth = class_depth (target_type);

	  /* class_depth can return a negative depth if an error occurred */
	  if (source_depth < 0 || target_depth < 0)
	    return 0;

	  if (CLASS_INTERFACE (TYPE_NAME (target_type)))
	    {
	      /* target_type is OK if source_type or source_type ancestors
		 implement target_type. We handle multiple sub-interfaces  */

	      tree basetype_vec = TYPE_BINFO_BASETYPES (source_type);
	      int n = TREE_VEC_LENGTH (basetype_vec), i;
	      for (i=0 ; i < n; i++)
	        if (can_widen_reference_to 
		    (TREE_TYPE (TREE_VEC_ELT (basetype_vec, i)),
		     target_type))
		  return 1;
	      if (n == 0)
		return 0;
	    }

	  for ( ; source_depth > target_depth;  source_depth--) 
	    {
	      source_type = TYPE_BINFO_BASETYPE (source_type, 0); 
	    }
	  return source_type == target_type;
	}
    }
}

static tree
pop_value (type)
     tree type;
{
  type = pop_type (type);
  if (quick_stack)
    {
      tree node = quick_stack;
      quick_stack = TREE_CHAIN (quick_stack);
      TREE_CHAIN (node) = tree_list_free_list;
      tree_list_free_list = node;
      node = TREE_VALUE (node);
      return node;
    }
  else
    return find_stack_slot (stack_pointer, promote_type (type));
}


/* Pop and discard the top COUNT stack slots. */

static void
java_stack_pop (count)
     int count;
{
  while (count > 0)
    {
      tree type, val;

      if (stack_pointer == 0)
	abort ();

      type = stack_type_map[stack_pointer - 1];
      if (type == TYPE_SECOND)
	{
	  count--;
	  if (stack_pointer == 1 || count <= 0)
	    abort ();

	  type = stack_type_map[stack_pointer - 2];
	}
      val = pop_value (type);
      count--;
    }
}

/* Implement the 'swap' operator (to swap two top stack slots). */

static void
java_stack_swap ()
{
  tree type1, type2;
  rtx temp;
  tree decl1, decl2;

  if (stack_pointer < 2
      || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_UNKNOWN
      || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_UNKNOWN
      || type1 == TYPE_SECOND || type2 == TYPE_SECOND
      || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
    /* Bad stack swap.  */
    abort ();

  flush_quick_stack ();
  decl1 = find_stack_slot (stack_pointer - 1, type1);
  decl2 = find_stack_slot (stack_pointer - 2, type2);
  temp = copy_to_reg (DECL_RTL (decl1));
  emit_move_insn (DECL_RTL (decl1), DECL_RTL (decl2));
  emit_move_insn (DECL_RTL (decl2), temp);
  stack_type_map[stack_pointer - 1] = type2;
  stack_type_map[stack_pointer - 2] = type1;
}

static void
java_stack_dup (size, offset)
     int size, offset;
{
  int low_index = stack_pointer - size - offset;
  int dst_index;
  if (low_index < 0)
    error ("stack underflow - dup* operation");

  flush_quick_stack ();

  stack_pointer += size;
  dst_index = stack_pointer;

  for (dst_index = stack_pointer;  --dst_index >= low_index; )
    {
      tree type;
      int src_index = dst_index - size;
      if (src_index < low_index)
	src_index = dst_index + size + offset;
      type = stack_type_map [src_index];
      if (type == TYPE_SECOND)
	{
	  if (src_index <= low_index)
	    /* Dup operation splits 64-bit number.  */
	    abort ();

	  stack_type_map[dst_index] = type;
	  src_index--;  dst_index--;
	  type = stack_type_map[src_index];
	  if (! TYPE_IS_WIDE (type))
	    abort ();
	}
      else if (TYPE_IS_WIDE (type))
	abort ();

      if (src_index != dst_index)
	{
	  tree src_decl = find_stack_slot (src_index, type);
	  tree dst_decl = find_stack_slot (dst_index, type);
	  emit_move_insn (DECL_RTL (dst_decl), DECL_RTL (src_decl));
	  stack_type_map[dst_index] = type;
	}
    }
}
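/* An illustrative sketch, not compiled in: how the JVM dup family maps
   onto the (size, offset) arguments of java_stack_dup.  SIZE is the
   number of stack words copied, OFFSET how far below the copied words
   the copy is inserted.  */
#if 0
static void
stack_dup_examples ()
{
  java_stack_dup (1, 0);	/* dup     : ..., a    -> ..., a, a       */
  java_stack_dup (1, 1);	/* dup_x1  : ..., b, a -> ..., a, b, a    */
  java_stack_dup (1, 2);	/* dup_x2  */
  java_stack_dup (2, 0);	/* dup2    : ..., b, a -> ..., b, a, b, a */
  java_stack_dup (2, 1);	/* dup2_x1 */
  java_stack_dup (2, 2);	/* dup2_x2 */
}
#endif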

/* Calls _Jv_Throw or _Jv_Sjlj_Throw.  Discard the contents of the
   value stack. */

static void
build_java_athrow (node)
    tree node;
{
  tree call;

  call = build (CALL_EXPR,
		void_type_node,
		build_address_of (throw_node),
		build_tree_list (NULL_TREE, node),
		NULL_TREE);
  TREE_SIDE_EFFECTS (call) = 1;
  expand_expr_stmt (call);
  java_stack_pop (stack_pointer);
}

/* Implementation for jsr/ret */

static void
build_java_jsr (target_pc, return_pc)
     int target_pc, return_pc;
{
  tree where =  lookup_label (target_pc);
  tree ret = lookup_label (return_pc);
  tree ret_label = fold (build1 (ADDR_EXPR, return_address_type_node, ret));
  push_value (ret_label);
  flush_quick_stack ();
  emit_jump (label_rtx (where));
  expand_label (ret);
  if (instruction_bits [return_pc] & BCODE_VERIFIED)
    load_type_state (ret);
}

static void
build_java_ret (location)
  tree location;
{
  expand_computed_goto (location);
}
 
/* Implementation of operations on array: new, load, store, length */

tree
decode_newarray_type (atype)
  int atype;
{
  switch (atype)
    {
    case 4:  return boolean_type_node;
    case 5:  return char_type_node;
    case 6:  return float_type_node;
    case 7:  return double_type_node;
    case 8:  return byte_type_node;
    case 9:  return short_type_node;
    case 10: return int_type_node;
    case 11: return long_type_node;
    default: return NULL_TREE;
    }
}

/* Map primitive type to the code used by OPCODE_newarray. */

int
encode_newarray_type (type)
     tree type;
{
  if (type == boolean_type_node)
    return 4;
  else if (type == char_type_node)
    return 5;
  else if (type == float_type_node)
    return 6;
  else if (type == double_type_node)
    return 7;
  else if (type == byte_type_node)
    return 8;
  else if (type == short_type_node)
    return 9;
  else if (type == int_type_node)
    return 10;
  else if (type == long_type_node)
    return 11;
  else
    abort ();
}

/* Build a call to _Jv_ThrowBadArrayIndex(), the
   ArrayIndexOutOfBoundsException exception handler.  */

static tree
build_java_throw_out_of_bounds_exception (index)
    tree index;
{
  tree node = build (CALL_EXPR, int_type_node,
		     build_address_of (soft_badarrayindex_node), 
		     build_tree_list (NULL_TREE, index), NULL_TREE);
  TREE_SIDE_EFFECTS (node) = 1;	/* Allows expansion within ANDIF */
  return (node);
}

/* Return the length of an array. Doesn't perform any checking on the nature
   or value of the array NODE. May be used to implement some bytecodes.  */

tree
build_java_array_length_access (node)
    tree node;
{
  tree type = TREE_TYPE (node);
  tree array_type = TREE_TYPE (type);
  HOST_WIDE_INT length;

  /* JVM spec: If the arrayref is null, the arraylength instruction
     throws a NullPointerException.  The only way we could get a node
     of type ptr_type_node at this point is `aconst_null; arraylength'
     or something equivalent.  */
  if (type == ptr_type_node)
    return build (CALL_EXPR, int_type_node, 
		  build_address_of (soft_nullpointer_node),
		  NULL_TREE, NULL_TREE);

  if (!is_array_type_p (type))
    abort ();

  length = java_array_type_length (type);
  if (length >= 0)
    return build_int_2 (length, 0);

  node = build (COMPONENT_REF, int_type_node,
		build_java_indirect_ref (array_type, node,
					 flag_check_references),
		lookup_field (&array_type, get_identifier ("length")));
  IS_ARRAY_LENGTH_ACCESS (node) = 1;
  return node;
}

/* Optionally checks a reference against the NULL pointer.  ARG1: the
   expr, ARG2: we should check the reference.  Don't generate extra
   checks if we're not generating code.  */

tree 
java_check_reference (expr, check)
     tree expr;
     int check;
{
  if (!flag_syntax_only && check)
    {
      tree cond;
      expr = save_expr (expr);
      cond = build (COND_EXPR, void_type_node,
		    build (EQ_EXPR, boolean_type_node, expr, null_pointer_node),
		    build (CALL_EXPR, void_type_node, 
			   build_address_of (soft_nullpointer_node),
			   NULL_TREE, NULL_TREE),
		    empty_stmt_node);
      expr = build (COMPOUND_EXPR, TREE_TYPE (expr), cond, expr);
    }

  return expr;
}
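/* An illustrative sketch, not compiled in: the tree built above behaves
   roughly like the C below, where npe () is a hypothetical stand-in for
   a call through soft_nullpointer_node.  */
#if 0
static void *
checked_reference_example (void *expr)
{
  if (expr == 0)
    npe ();		/* COND_EXPR arm calling the null-pointer hook */
  return expr;		/* the COMPOUND_EXPR still yields the reference */
}
#endif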

/* Reference an object: just like an INDIRECT_REF, but with checking.  */

tree
build_java_indirect_ref (type, expr, check)
     tree type;
     tree expr;
     int check;
{
  return build1 (INDIRECT_REF, type, java_check_reference (expr, check));
}

/* Implement array indexing (either as l-value or r-value).
   Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
   Optionally performs bounds checking and/or test to NULL.
   At this point, ARRAY should have been verified as an array.  */

tree
build_java_arrayaccess (array, type, index)
    tree array, type, index;
{
  tree node, throw = NULL_TREE;
  tree data_field;
  tree ref;
  tree array_type = TREE_TYPE (TREE_TYPE (array));

  if (flag_bounds_check)
    {
      /* Generate:
       * (unsigned jint) INDEX >= (unsigned jint) LEN
       *    && throw ArrayIndexOutOfBoundsException.
       * Note this is equivalent to and more efficient than:
       * INDEX < 0 || INDEX >= LEN && throw ... */
      tree test;
      tree len = build_java_array_length_access (array);
      TREE_TYPE (len) = unsigned_int_type_node;
      test = fold (build (GE_EXPR, boolean_type_node, 
			       convert (unsigned_int_type_node, index),
			       len));
      if (! integer_zerop (test))
	{
	  throw = build (TRUTH_ANDIF_EXPR, int_type_node, test,
			 build_java_throw_out_of_bounds_exception (index));
	  /* allows expansion within COMPOUND */
	  TREE_SIDE_EFFECTS( throw ) = 1;
	}
    }

  /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
     to have the bounds check evaluated first. */
  if (throw != NULL_TREE)
    index = build (COMPOUND_EXPR, int_type_node, throw, index);
 
  data_field = lookup_field (&array_type, get_identifier ("data"));

  ref = build (COMPONENT_REF, TREE_TYPE (data_field),    
	       build_java_indirect_ref (array_type, array, 
					flag_check_references),
	       data_field);
  
  node = build (ARRAY_REF, type, ref, index);
  return node;
}
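/* An illustrative sketch, not compiled in: with flag_bounds_check the
   access built above behaves roughly like this C, where len stands in
   for the array length access and oob () for the out-of-bounds throw.  */
#if 0
static int
bounds_checked_access_example (int *data, unsigned int len, int index)
{
  ((unsigned int) index >= len && oob (index));	/* TRUTH_ANDIF_EXPR */
  return data[index];				/* ARRAY_REF */
}
#endif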

/* Generate code to throw an ArrayStoreException if OBJECT is not assignable
   (at runtime) to an element of ARRAY.  A NOP_EXPR is returned if it can
   determine that no check is required. */

tree
build_java_arraystore_check (array, object)
   tree array; 
   tree object;
{
  tree check, element_type, source;
  tree array_type_p = TREE_TYPE (array);
  tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));

  if (! is_array_type_p (array_type_p))
    abort ();

  /* Get the TYPE_DECL for ARRAY's element type. */
  element_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));

  if (TREE_CODE (element_type) != TYPE_DECL   
      || TREE_CODE (object_type) != TYPE_DECL)
    abort ();

  if (!flag_store_check)
    return build1 (NOP_EXPR, array_type_p, array);

  /* No check is needed if the element type is final or is itself an array.  
     Also check that element_type matches object_type, since in the bytecode 
     compilation case element_type may be the actual element type of the array
     rather than its declared type. */
  if (element_type == object_type
      && (TYPE_ARRAY_P (TREE_TYPE (element_type))
	  || CLASS_FINAL (element_type)))
    return build1 (NOP_EXPR, array_type_p, array);
  
  /* OBJECT might be wrapped by a SAVE_EXPR. */
  if (TREE_CODE (object) == SAVE_EXPR)
    source = TREE_OPERAND (object, 0);
  else
    source = object;
  
  /* Avoid the check if OBJECT was just loaded from the same array. */
  if (TREE_CODE (source) == ARRAY_REF)
    {
      tree target;
      source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
      source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
      source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
      if (TREE_CODE (source) == SAVE_EXPR)
	source = TREE_OPERAND (source, 0);
      
      target = array;
      if (TREE_CODE (target) == SAVE_EXPR)
	target = TREE_OPERAND (target, 0);
      
      if (source == target)
        return build1 (NOP_EXPR, array_type_p, array);
    }

  /* Build an invocation of _Jv_CheckArrayStore */
  check = build (CALL_EXPR, void_type_node,
		 build_address_of (soft_checkarraystore_node),
		 tree_cons (NULL_TREE, array,
		 	    build_tree_list (NULL_TREE, object)),
		 NULL_TREE);
  TREE_SIDE_EFFECTS (check) = 1;

  return check;
}

/* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
   ARRAY_NODE. This function is used to retrieve something less vague than
   a pointer type when indexing the first dimension of something like [[<t>.
   May return a corrected type, if necessary, otherwise INDEXED_TYPE is
   returned unchanged.
   As a side effect, it also makes sure that ARRAY_NODE is an array.  */

static tree
build_java_check_indexed_type (array_node, indexed_type)
    tree array_node;
    tree indexed_type;
{
  tree elt_type;

  if (!is_array_type_p (TREE_TYPE (array_node)))
    abort ();

  elt_type = (TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (array_node))));

  if (indexed_type == ptr_type_node )
      return promote_type (elt_type);

  /* BYTE/BOOLEAN store and load are used for both types.  */
  if (indexed_type == byte_type_node && elt_type == boolean_type_node )
    return boolean_type_node;

  if (indexed_type != elt_type )
    abort ();
  else
    return indexed_type;
}

/* newarray triggers a call to _Jv_NewPrimArray. This function should be 
   called with an integer code (the type of array to create), and the length
   of the array to create.  */

tree
build_newarray (atype_value, length)
     int atype_value;
     tree length;
{
  tree type_arg;

  tree prim_type = decode_newarray_type (atype_value);
  tree type
    = build_java_array_type (prim_type,
			     host_integerp (length, 0)
			     ? tree_low_cst (length, 0) : -1);

  /* If compiling to native, pass a reference to the primitive type class 
     and save the runtime some work. However, the bytecode generator
     expects to find the type_code int here. */
  if (flag_emit_class_files)
    type_arg = build_int_2 (atype_value, 0);
  else
    type_arg = build_class_ref (prim_type);

  return build (CALL_EXPR, promote_type (type),
		build_address_of (soft_newarray_node),
		tree_cons (NULL_TREE, 
			   type_arg,
			   build_tree_list (NULL_TREE, length)),
		NULL_TREE);
}

/* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
   of the dimension. */

tree
build_anewarray (class_type, length)
    tree class_type;
    tree length;
{
  tree type
    = build_java_array_type (class_type,
			     host_integerp (length, 0)
			     ? tree_low_cst (length, 0) : -1);

  return build (CALL_EXPR, promote_type (type),
		build_address_of (soft_anewarray_node),
		tree_cons (NULL_TREE, length,
			   tree_cons (NULL_TREE, build_class_ref (class_type),
				      build_tree_list (NULL_TREE,
						       null_pointer_node))),
		NULL_TREE);
}

/* Return a node that evaluates 'new TYPE[LENGTH]'. */

tree
build_new_array (type, length)
     tree type;
     tree length;
{
  if (JPRIMITIVE_TYPE_P (type))
    return build_newarray (encode_newarray_type (type), length);
  else
    return build_anewarray (TREE_TYPE (type), length);
}
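/* An illustrative sketch, not compiled in: building `new int[10]' and
   `new Object[n]' through the helpers above; the length of the second
   array is assumed to already be on the quick stack.  */
#if 0
static void
new_array_usage_example ()
{
  tree prim = build_newarray (encode_newarray_type (int_type_node),
			      build_int_2 (10, 0));
  tree refs = build_anewarray (object_type_node,
			       pop_value (int_type_node));
  push_value (prim);
  push_value (refs);
}
#endif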

/* Generates a call to _Jv_NewMultiArray. multianewarray expects a
   class pointer, the number of dimensions, and the matching number of
   dimension sizes. The argument list is NULL terminated.  */

static void
expand_java_multianewarray (class_type, ndim)
    tree class_type;
    int  ndim;
{
  int i;
  tree args = build_tree_list( NULL_TREE, null_pointer_node );

  for( i = 0; i < ndim; i++ )
    args = tree_cons (NULL_TREE, pop_value (int_type_node), args);

  push_value (build (CALL_EXPR,
		     promote_type (class_type),
		     build_address_of (soft_multianewarray_node),
		     tree_cons (NULL_TREE, build_class_ref (class_type),
				tree_cons (NULL_TREE, 
					   build_int_2 (ndim, 0), args )),
		     NULL_TREE));
}

/*  ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
    ARRAY is an array type. May expand some bound checking and NULL
    pointer checking. RHS_TYPE_NODE is the type of the value we are going
    to store. In the case of the CHAR/BYTE/BOOLEAN/SHORT types, the type
    popped off the stack is an INT. In those cases, we make the conversion.

    If ARRAY is a reference type, the assignment is checked at run-time
    to make sure that the RHS can be assigned to the array element
    type. It is not necessary to generate this code if ARRAY is final.  */

static void
expand_java_arraystore (rhs_type_node)
     tree rhs_type_node;
{
  tree rhs_node    = pop_value ((INTEGRAL_TYPE_P (rhs_type_node) 
				 && TYPE_PRECISION (rhs_type_node) <= 32) ? 
				 int_type_node : rhs_type_node);
  tree index = pop_value (int_type_node);
  tree array = pop_value (ptr_type_node);

  rhs_type_node    = build_java_check_indexed_type (array, rhs_type_node);

  flush_quick_stack ();

  index = save_expr (index);
  array = save_expr (array);

  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      tree check = build_java_arraystore_check (array, rhs_node);
      expand_expr_stmt (check);
    }
  
  expand_assignment (build_java_arrayaccess (array,
					     rhs_type_node,
					     index),
		     rhs_node, 0, 0);
}

/* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes 
   sure that LHS is an array type. May expand some bound checking and NULL
   pointer checking.  
   LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
   BOOLEAN/SHORT, we push a promoted type back to the stack.
*/

static void
expand_java_arrayload (lhs_type_node )
    tree lhs_type_node;
{
  tree load_node;
  tree index_node = pop_value (int_type_node);
  tree array_node = pop_value (ptr_type_node);

  index_node = save_expr (index_node);
  array_node = save_expr (array_node);
  
  if (TREE_TYPE (array_node) == ptr_type_node)
    /* The only way we could get a node of type ptr_type_node at this
       point is `aconst_null; arraylength' or something equivalent, so
       unconditionally throw NullPointerException.  */    
    load_node = build (CALL_EXPR, lhs_type_node, 
		       build_address_of (soft_nullpointer_node),
		       NULL_TREE, NULL_TREE);
  else
    {
      lhs_type_node = build_java_check_indexed_type (array_node, lhs_type_node);
      load_node = build_java_arrayaccess (array_node,
					  lhs_type_node,
					  index_node);
    }
  if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
    load_node = fold (build1 (NOP_EXPR, int_type_node, load_node));
  push_value (load_node);
}

/* Expands .length. Makes sure that we deal with an array and may expand
   a NULL check on the array object.  */

static void
expand_java_array_length ()
{
  tree array  = pop_value (ptr_type_node);
  tree length = build_java_array_length_access (array);

  push_value (length);
}

/* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
   either soft_monitorenter_node or soft_monitorexit_node.  */

static tree
build_java_monitor (call, object)
    tree call;
    tree object;
{
  return (build (CALL_EXPR,
		 void_type_node,
		 build_address_of (call),
		 build_tree_list (NULL_TREE, object),
		 NULL_TREE));
}

/* Emit code for one of the PUSHC instructions. */

static void
expand_java_pushc (ival, type)
     int ival;
     tree type;
{
  tree value;
  if (type == ptr_type_node && ival == 0)
    value = null_pointer_node;
  else if (type == int_type_node || type == long_type_node)
    {
      value = build_int_2 (ival, ival < 0 ? -1 : 0);
      TREE_TYPE (value) = type;
    }
  else if (type == float_type_node || type == double_type_node)
    {
      REAL_VALUE_TYPE x;
      REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
      value = build_real (type, x);
    }
  else
    abort ();

  push_value (value);
}

static void
expand_java_return (type)
     tree type;
{
  if (type == void_type_node)
    expand_null_return ();
  else
    {
      tree retval = pop_value (type);
      tree res = DECL_RESULT (current_function_decl);
      retval = build (MODIFY_EXPR, TREE_TYPE (res), res, retval);

      /* Handle the situation where the native integer type is smaller
	 than the JVM integer. It can happen for many cross compilers.
	 The whole if expression just goes away if INT_TYPE_SIZE < 32
	 is false. */
      if (INT_TYPE_SIZE < 32
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
	      < GET_MODE_SIZE (TYPE_MODE (type))))
	retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
      
      TREE_SIDE_EFFECTS (retval) = 1;
      expand_return (retval);
    }
}

static void
expand_load_internal (index, type, pc)
     int index;
     tree type;
     int pc;
{
  tree copy;
  tree var = find_local_variable (index, type, pc);

  /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
     on the stack.  If there is an assignment to this VAR_DECL between
     the stack push and the use, then the wrong code could be
     generated.  To avoid this we create a new local and copy our
     value into it.  Then we push this new local on the stack.
     Hopefully this all gets optimized out.  */
  copy = build_decl (VAR_DECL, NULL_TREE, type);
  DECL_CONTEXT (copy) = current_function_decl;
  layout_decl (copy, 0);
  DECL_REGISTER (copy) = 1;
  expand_decl (copy);
  MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (copy);
  DECL_INITIAL (copy) = var;
  expand_decl_init (copy);
  push_value (copy);
}

tree
build_address_of (value)
     tree value;
{
  return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
}

bool
class_has_finalize_method (type)
     tree type;
{
  tree super = CLASSTYPE_SUPER (type);

  if (super == NULL_TREE)
    return false;	/* Every class with a real finalizer inherits	*/
   			/* from java.lang.Object.			*/
  else
    return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
}

static void
expand_java_NEW (type)
     tree type;
{
  tree alloc_node;

  alloc_node = (class_has_finalize_method (type) ? alloc_object_node
		  				 : alloc_no_finalizer_node);
  if (! CLASS_LOADED_P (type))
    load_class (type, 1);
  safe_layout_class (type);
  push_value (build (CALL_EXPR, promote_type (type),
		     build_address_of (alloc_node),
		     tree_cons (NULL_TREE, build_class_ref (type),
				build_tree_list (NULL_TREE,
						 size_in_bytes (type))),
		     NULL_TREE));
}

/* This returns an expression which will extract the class of an
   object.  */

tree
build_get_class (value)
     tree value;
{
  tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
  tree vtable_field = lookup_field (&object_type_node,
				    get_identifier ("vtable"));
  return build (COMPONENT_REF, class_ptr_type,
		build1 (INDIRECT_REF, dtable_type,
			build (COMPONENT_REF, dtable_ptr_type,
			       build_java_indirect_ref (object_type_node, value,
							flag_check_references),
			       vtable_field)),
		class_field);
}

/* This builds the tree representation of the `instanceof' operator.
   It tries various tricks to optimize this in cases where types are
   known.  */

tree
build_instanceof (value, type)
     tree value, type;
{
  tree expr;
  tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
  tree valtype = TREE_TYPE (TREE_TYPE (value));
  tree valclass = TYPE_NAME (valtype);
  tree klass;

  /* When compiling from bytecode, we need to ensure that TYPE has
     been loaded.  */
  if (CLASS_P (type) && ! CLASS_LOADED_P (type))
    {
      load_class (type, 1);
      safe_layout_class (type);
      if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
	return error_mark_node;
    }
  klass = TYPE_NAME (type);

  if (type == object_type_node || inherits_from_p (valtype, type))
    {
      /* Anything except `null' is an instance of Object.  Likewise,
	 if the object is known to be an instance of the class, then
	 we only need to check for `null'.  */
      expr = build (COND_EXPR, itype,
		    value,
		    boolean_true_node, boolean_false_node);
    }
  else if (! TYPE_ARRAY_P (type)
	   && ! TYPE_ARRAY_P (valtype)
	   && DECL_P (klass) && DECL_P (valclass)
	   && ! CLASS_INTERFACE (valclass)
	   && ! CLASS_INTERFACE (klass)
	   && ! inherits_from_p (type, valtype)
	   && (CLASS_FINAL (klass)
	       || ! inherits_from_p (valtype, type)))
    {
      /* The classes are from different branches of the derivation
	 tree, so we immediately know the answer.  */
      expr = boolean_false_node;
    }
  else if (DECL_P (klass) && CLASS_FINAL (klass))
    {
      tree save = save_expr (value);
      expr = build (COND_EXPR, itype,
		    save,
		    build (EQ_EXPR, itype,
			   build_get_class (save),
			   build_class_ref (type)),
		    boolean_false_node);
    }
  else
    {
      expr = build (CALL_EXPR, itype,
		    build_address_of (soft_instanceof_node),
		    tree_cons (NULL_TREE, value,
			       build_tree_list (NULL_TREE,
						build_class_ref (type))),
		    NULL_TREE);
    }
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
  return expr;
}
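/* An illustrative sketch, not compiled in: the special cases handled
   above, as rough pseudo-code.  All predicate and field names here are
   stand-ins; the general case calls the runtime helper referenced by
   soft_instanceof_node.  */
#if 0
static int
instanceof_shapes_example (struct java_lang_Object *o)
{
  if (known_subtype_p)		/* inherits_from_p (valtype, type) */
    return o != 0;
  if (provably_unrelated_p)	/* disjoint branches of the class tree */
    return 0;
  if (target_is_final_p)	/* a single vtable comparison suffices */
    return o != 0 && o->vtable->class == target_class;
  return runtime_instanceof_helper (o, target_class);
}
#endif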

static void
expand_java_INSTANCEOF (type)
     tree type;
{
  tree value = pop_value (object_ptr_type_node);
  value = build_instanceof (value, type);
  push_value (value);
}

static void
expand_java_CHECKCAST (type)
     tree type;
{
  tree value = pop_value (ptr_type_node);
  value = build (CALL_EXPR, promote_type (type),
		 build_address_of (soft_checkcast_node),
		 tree_cons (NULL_TREE, build_class_ref (type),
			    build_tree_list (NULL_TREE, value)),
		 NULL_TREE);
  push_value (value);
}

static void
expand_iinc (local_var_index, ival, pc)
     unsigned int local_var_index;
     int ival;
     int pc;
{
    tree local_var, res;
    tree constant_value;

    flush_quick_stack ();
    local_var = find_local_variable (local_var_index, int_type_node, pc);
    constant_value = build_int_2 (ival, ival < 0 ? -1 : 0);
    res = fold (build (PLUS_EXPR, int_type_node, local_var, constant_value));
    expand_assignment (local_var, res, 0, 0);
}

      
tree
build_java_soft_divmod (op, type, op1, op2)
    enum tree_code op;
    tree type, op1, op2;
{
  tree call = NULL;
  tree arg1 = convert (type, op1);
  tree arg2 = convert (type, op2);

  if (type == int_type_node)
    {	  
      switch (op)
	{
	case TRUNC_DIV_EXPR:
	  call = soft_idiv_node;
	  break;
	case TRUNC_MOD_EXPR:
	  call = soft_irem_node;
	  break;
	default:
	  break;
	}
    }
  else if (type == long_type_node)
    {	  
      switch (op)
	{
	case TRUNC_DIV_EXPR:
	  call = soft_ldiv_node;
	  break;
	case TRUNC_MOD_EXPR:
	  call = soft_lrem_node;
	  break;
	default:
	  break;
	}
    }

  if (! call)
    abort ();
		  
  call = build (CALL_EXPR, type,
		build_address_of (call),
		tree_cons (NULL_TREE, arg1,
			   build_tree_list (NULL_TREE, arg2)),
		NULL_TREE);
	  
  return call;
}

tree
build_java_binop (op, type, arg1, arg2)
     enum tree_code op;
     tree type, arg1, arg2;
{
  tree mask;
  switch (op)
    {
    case URSHIFT_EXPR:
      {
	tree u_type = java_unsigned_type (type);
	arg1 = convert (u_type, arg1);
	arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
	return convert (type, arg1);
      }
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      mask = build_int_2 (TYPE_PRECISION (TREE_TYPE (arg1)) - 1, 0);
      arg2 = fold (build (BIT_AND_EXPR, int_type_node, arg2, mask));
      break;

    case COMPARE_L_EXPR:  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
    case COMPARE_G_EXPR:  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold ( build (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
				    boolean_type_node, arg1, arg2));
	tree ifexp2 = fold ( build (EQ_EXPR, boolean_type_node, arg1, arg2));
	tree second_compare = fold (build (COND_EXPR, int_type_node,
					   ifexp2, integer_zero_node,
					   op == COMPARE_L_EXPR
					   ? integer_minus_one_node
					   : integer_one_node));
	return fold (build (COND_EXPR, int_type_node, ifexp1,
			    op == COMPARE_L_EXPR ? integer_one_node
			    : integer_minus_one_node,
			    second_compare));
      }
    case COMPARE_EXPR:
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold ( build (LT_EXPR, boolean_type_node, arg1, arg2));
	tree ifexp2 = fold ( build (GT_EXPR, boolean_type_node, arg1, arg2));
	tree second_compare = fold ( build (COND_EXPR, int_type_node,
					    ifexp2, integer_one_node,
					    integer_zero_node));
	return fold (build (COND_EXPR, int_type_node,
			    ifexp1, integer_minus_one_node, second_compare));
      }      
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      if (TREE_CODE (type) == REAL_TYPE
	  && op == TRUNC_MOD_EXPR)
	{
	  tree call;
	  if (type != double_type_node)
	    {
	      arg1 = convert (double_type_node, arg1);
	      arg2 = convert (double_type_node, arg2);
	    }
	  call = build (CALL_EXPR, double_type_node,
			build_address_of (soft_fmod_node),
			tree_cons (NULL_TREE, arg1,
				   build_tree_list (NULL_TREE, arg2)),
			NULL_TREE);
	  if (type != double_type_node)
	    call = convert (type, call);
	  return call;
	}
      
      if (TREE_CODE (type) == INTEGER_TYPE
	  && flag_use_divide_subroutine
	  && ! flag_syntax_only)
	return build_java_soft_divmod (op, type, arg1, arg2);
      
      break;
    default:  ;
    }
  return fold (build (op, type, arg1, arg2));
}

static void
expand_java_binop (type, op)
     tree type;  enum tree_code op;
{
  tree larg, rarg;
  tree ltype = type;
  tree rtype = type;
  switch (op)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case URSHIFT_EXPR:
      rtype = int_type_node;
      rarg = pop_value (rtype);
      break;
    default:
      rarg = pop_value (rtype);
    }
  larg = pop_value (ltype);
  push_value (build_java_binop (op, type, larg, rarg));
}

/* Lookup the field named NAME in *TYPEP or its super classes.
   If not found, return NULL_TREE.
   (If the *TYPEP is not found, or if the field reference is
   ambiguous, return error_mark_node.)
   If found, return the FIELD_DECL, and set *TYPEP to the
   class containing the field. */

tree
lookup_field (typep, name)
     tree *typep;
     tree name;
{
  if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
    {
      load_class (*typep, 1);
      safe_layout_class (*typep);
      if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
	return error_mark_node;
    }
  do
    {
      tree field, basetype_vec;
      tree save_field;
      int n, i;

      for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
	if (DECL_NAME (field) == name)
	  return field;

      /* Process implemented interfaces. */
      basetype_vec = TYPE_BINFO_BASETYPES (*typep);
      n = TREE_VEC_LENGTH (basetype_vec);
      save_field = NULL_TREE;
      for (i = 0; i < n; i++)
	{
	  tree t = BINFO_TYPE (TREE_VEC_ELT (basetype_vec, i));
	  if ((field = lookup_field (&t, name)))
	    {
	      if (save_field == field)
		continue;
	      if (save_field == NULL_TREE)
		save_field = field;
	      else
		{
		  tree i1 = DECL_CONTEXT (save_field);
		  tree i2 = DECL_CONTEXT (field);
		  error ("reference `%s' is ambiguous: appears in interface `%s' and interface `%s'",
			 IDENTIFIER_POINTER (name),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
		  return error_mark_node;
		}
	    }
	}

      if (save_field != NULL_TREE)
	return save_field;

      *typep = CLASSTYPE_SUPER (*typep);
    } while (*typep);
  return NULL_TREE;
}

/* Look up the field named NAME in object SELF_VALUE,
   which has class SELF_CLASS (a non-handle RECORD_TYPE).
   SELF_VALUE is NULL_TREE if looking for a static field. */

tree
build_field_ref (self_value, self_class, name)
     tree self_value, self_class, name;
{
  tree base_class = self_class;
  tree field_decl = lookup_field (&base_class, name);
  if (field_decl == NULL_TREE)
    {
      error ("field `%s' not found", IDENTIFIER_POINTER (name));
      return error_mark_node;
    }
  if (self_value == NULL_TREE)
    {
      return build_static_field_ref (field_decl);
    }
  else
    {
      int check = (flag_check_references
		   && ! (DECL_P (self_value)
			 && DECL_NAME (self_value) == this_identifier_node));

      tree base_type = promote_type (base_class);
      if (base_type != TREE_TYPE (self_value))
	self_value = fold (build1 (NOP_EXPR, base_type, self_value));
      self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
					    self_value, check);
      return fold (build (COMPONENT_REF, TREE_TYPE (field_decl),
			  self_value, field_decl));
    }
}

tree
lookup_label (pc)
     int pc;
{
  tree name;
  char buf[32];
  ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", pc);
  name = get_identifier (buf);
  if (IDENTIFIER_LOCAL_VALUE (name))
    return IDENTIFIER_LOCAL_VALUE (name);
  else
    {
      /* The type of the address of a label is return_address_type_node. */
      tree decl = create_label_decl (name);
      LABEL_PC (decl) = pc;
      label_rtx (decl);
      return pushdecl (decl);
    }
}

/* Generate a unique name for labels used by loops, switches, and
   try-catch-finally blocks, and for temporary variables.  */

tree
generate_name ()
{
  static int l_number = 0;
  char buff [32];
  ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
  l_number++;
  return get_identifier (buff);
}

tree
create_label_decl (name)
     tree name;
{
  tree decl;
  decl = build_decl (LABEL_DECL, name, 
		     TREE_TYPE (return_address_type_node));
  DECL_CONTEXT (decl) = current_function_decl;
  DECL_IGNORED_P (decl) = 1;
  return decl;
}

/* This maps a bytecode offset (PC) to various flags. */
char *instruction_bits;

static void
note_label (current_pc, target_pc)
     int current_pc ATTRIBUTE_UNUSED, target_pc;
{
  lookup_label (target_pc);
  instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
}

/* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
   where CONDITION is one of the compare operators. */
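/* For example, an `if_icmplt' bytecode reaches this point (via
   expand_cond below) with CONDITION == LT_EXPR and the two popped
   integer operands, and the emitted code is effectively
   `if (value1 < value2) goto <label for TARGET_PC>;'.  */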

static void
expand_compare (condition, value1, value2, target_pc)
     enum tree_code condition;
     tree value1, value2;
     int target_pc;
{
  tree target = lookup_label (target_pc);
  tree cond = fold (build (condition, boolean_type_node, value1, value2));
  expand_start_cond (java_truthvalue_conversion (cond), 0);
  expand_goto (target);
  expand_end_cond ();
}

/* Emit code for a TEST-type opcode. */

static void
expand_test (condition, type, target_pc)
     enum tree_code condition;
     tree type;
     int target_pc;
{
  tree value1, value2;
  flush_quick_stack ();
  value1 = pop_value (type);
  value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
  expand_compare (condition, value1, value2, target_pc);
}

/* Emit code for a COND-type opcode. */

static void
expand_cond (condition, type, target_pc)
     enum tree_code condition;
     tree type;
     int target_pc;
{
  tree value1, value2;
  flush_quick_stack ();
  /* note: pop values in opposite order */
  value2 = pop_value (type);
  value1 = pop_value (type);
  /* Maybe should check value1 and value2 for type compatibility ??? */
  expand_compare (condition, value1, value2, target_pc);
}

static void
expand_java_goto (target_pc)
     int target_pc;
{
  tree target_label = lookup_label (target_pc);
  flush_quick_stack ();
  expand_goto (target_label);
}

#if 0
static void
expand_java_call (target_pc, return_address)
     int target_pc, return_address;
{
  tree target_label = lookup_label (target_pc);
  tree value = build_int_2 (return_address, return_address < 0 ? -1 : 0);
  push_value (value);
  flush_quick_stack ();
  expand_goto (target_label);
}

static void
expand_java_ret (return_address)
     tree return_address ATTRIBUTE_UNUSED;
{
  warning ("ret instruction not implemented");
#if 0
  tree target_label = lookup_label (target_pc);
  flush_quick_stack ();
  expand_goto (target_label);
#endif
}
#endif

static tree
pop_arguments (arg_types)
     tree arg_types;
{
  if (arg_types == end_params_node)
    return NULL_TREE;
  if (TREE_CODE (arg_types) == TREE_LIST)
    {
      tree tail = pop_arguments (TREE_CHAIN (arg_types));
      tree type = TREE_VALUE (arg_types);
      tree arg = pop_value (type);
      if (PROMOTE_PROTOTYPES
	  && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
	  && INTEGRAL_TYPE_P (type))
	arg = convert (integer_type_node, arg);
      return tree_cons (NULL_TREE, arg, tail);
    }
  abort ();
}

/* Build an expression to initialize the class CLAS.
   If EXPR is non-NULL, returns an expression that first calls the
   initializer (if it is needed) and then evaluates EXPR. */
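/* A sketch of the expression built below when the initialization-test
   optimization is in effect (the names are the ones created in this
   function):

	(init_test == false ? <call soft_initclass_node (CLAS)> : 0),
	init_test = true,
	EXPR

   When always_initialize_class_p is set, the guard is omitted and the
   soft_initclass_node call is emitted unconditionally.  */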

tree
build_class_init (clas, expr)
     tree clas, expr;
{
  tree init;
  if (inherits_from_p (current_class, clas))
    return expr;

  if (always_initialize_class_p)
    {
      init = build (CALL_EXPR, void_type_node,
		    build_address_of (soft_initclass_node),
		    build_tree_list (NULL_TREE, build_class_ref (clas)),
		    NULL_TREE);
      TREE_SIDE_EFFECTS (init) = 1;
    }
  else
    {
      tree *init_test_decl;
      init_test_decl = java_treetreehash_new
	(DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);

      if (*init_test_decl == NULL)
	{
	  /* Build a declaration and mark it as a flag used to track
	     static class initializations. */
	  *init_test_decl = build_decl (VAR_DECL, NULL_TREE,
				       boolean_type_node);
	  MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (*init_test_decl);
	  LOCAL_CLASS_INITIALIZATION_FLAG (*init_test_decl) = 1;
	  DECL_CONTEXT (*init_test_decl) = current_function_decl;
	  DECL_FUNCTION_INIT_TEST_CLASS (*init_test_decl) = clas;
	  /* Tell the check-init code to ignore this decl when not
             optimizing class initialization. */
	  if (!STATIC_CLASS_INIT_OPT_P ())
	    DECL_BIT_INDEX(*init_test_decl) = -1;
	}

      init = build (CALL_EXPR, void_type_node,
		    build_address_of (soft_initclass_node),
		    build_tree_list (NULL_TREE, build_class_ref (clas)),
		    NULL_TREE);
      TREE_SIDE_EFFECTS (init) = 1;
      init = build (COND_EXPR, void_type_node,
		    build (EQ_EXPR, boolean_type_node, 
			   *init_test_decl, boolean_false_node),
		    init, integer_zero_node);
      TREE_SIDE_EFFECTS (init) = 1;
      init = build (COMPOUND_EXPR, TREE_TYPE (expr), init, 
		    build (MODIFY_EXPR, boolean_type_node,
			   *init_test_decl, boolean_true_node));
      TREE_SIDE_EFFECTS (init) = 1;
    }

  if (expr != NULL_TREE)
    {
      expr = build (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
      TREE_SIDE_EFFECTS (expr) = 1;
      return expr;
    }
  return init;
}

tree
build_known_method_ref (method, method_type, self_type,
			method_signature, arg_list)
     tree method, method_type ATTRIBUTE_UNUSED, self_type,
          method_signature ATTRIBUTE_UNUSED, arg_list ATTRIBUTE_UNUSED;
{
  tree func;
  if (is_compiled_class (self_type))
    {
      make_decl_rtl (method, NULL);
      func = build1 (ADDR_EXPR, method_ptr_type_node, method);
    }
  else
    {
      /* We don't know whether the method has been (statically) compiled.
	 Compile this code to get a reference to the method's code:

	 SELF_TYPE->methods[METHOD_INDEX].ncode

	 This is guaranteed to work (assuming SELF_TYPE has
	 been initialized), since if the method is not compiled yet,
	 its ncode points to a trampoline that forces compilation. */

      int method_index = 0;
      tree meth;
      tree ref = build_class_ref (self_type);
      ref = build1 (INDIRECT_REF, class_type_node, ref);
      if (ncode_ident == NULL_TREE)
	ncode_ident = get_identifier ("ncode");
      if (methods_ident == NULL_TREE)
	methods_ident = get_identifier ("methods");
      ref = build (COMPONENT_REF, method_ptr_type_node, ref,
		   lookup_field (&class_type_node, methods_ident));
      for (meth = TYPE_METHODS (self_type);
	   ; meth = TREE_CHAIN (meth))
	{
	  if (method == meth)
	    break;
	  if (meth == NULL_TREE)
	    fatal_error ("method '%s' not found in class",
			 IDENTIFIER_POINTER (DECL_NAME (method)));
	  method_index++;
	}
      method_index *= int_size_in_bytes (method_type_node);
      ref = fold (build (PLUS_EXPR, method_ptr_type_node,
			 ref, build_int_2 (method_index, 0)));
      ref = build1 (INDIRECT_REF, method_type_node, ref);
      func = build (COMPONENT_REF, nativecode_ptr_type_node,
		    ref,
		    lookup_field (&method_type_node, ncode_ident));
    }
  return func;
}

tree
invoke_build_dtable (is_invoke_interface, arg_list)
     int is_invoke_interface;
     tree arg_list;
{
  tree dtable, objectref;

  TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));

  /* If we're dealing with interfaces and if the objectref
     argument is an array then get the dispatch table of the class
     Object rather than the one from the objectref.  */
  objectref = (is_invoke_interface 
	       && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list))) ?
	       object_type_node : TREE_VALUE (arg_list));
  
  if (dtable_ident == NULL_TREE)
    dtable_ident = get_identifier ("vtable");
  dtable = build_java_indirect_ref (object_type_node, objectref, 
				    flag_check_references);
  dtable = build (COMPONENT_REF, dtable_ptr_type, dtable,
		  lookup_field (&object_type_node, dtable_ident));

  return dtable;
}

/* Determine the index in the virtual offset table (otable) for a call to
   METHOD. If this method has not been seen before, it will be added to the 
   otable_methods. If it has, the existing otable slot will be reused. */
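/* For example, the first method registered is appended to
   otable_methods and gets slot 1, the next distinct method gets slot 2,
   and a later call with the first method returns 1 again.  */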

int
get_offset_table_index (method)
     tree method;
{
  int i = 1;
  tree method_list;
  
  if (otable_methods == NULL_TREE)
    {
      otable_methods = build_tree_list (method, method);
      return 1;
    }
  
  method_list = otable_methods;
  
  while (1)
    {
      if (TREE_VALUE (method_list) == method)
        return i;
      i++;
      if (TREE_CHAIN (method_list) == NULL_TREE)
        break;
      else
        method_list = TREE_CHAIN (method_list);
    }

  TREE_CHAIN (method_list) = build_tree_list (method, method);
  return i;
}

tree 
build_invokevirtual (dtable, method)
     tree dtable, method;
{
  tree func;
  tree nativecode_ptr_ptr_type_node
    = build_pointer_type (nativecode_ptr_type_node);
  tree method_index;
  tree otable_index;

  if (flag_indirect_dispatch)
    {
      otable_index = build_int_2 (get_offset_table_index (method), 0);
      method_index = build (ARRAY_REF, integer_type_node, otable_decl, 
			    otable_index);
    }
  else
    {
      method_index = convert (sizetype, DECL_VINDEX (method));

      if (TARGET_VTABLE_USES_DESCRIPTORS)
	/* Add one to skip bogus descriptor for class and GC descriptor. */
	method_index = size_binop (PLUS_EXPR, method_index, size_int (1));
      else
	/* Add 1 to skip "class" field of dtable, and 1 to skip GC descriptor.  */
	method_index = size_binop (PLUS_EXPR, method_index, size_int (2));

      method_index = size_binop (MULT_EXPR, method_index,
				 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));

      if (TARGET_VTABLE_USES_DESCRIPTORS)
	method_index = size_binop (MULT_EXPR, method_index,
				   size_int (TARGET_VTABLE_USES_DESCRIPTORS));
    }
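  /* A worked example for the common case (no indirect dispatch, no
     vtable descriptors): with DECL_VINDEX (method) == 5 and 32-bit
     pointers, method_index becomes (5 + 2) * 4 == 28, i.e. `func'
     below is loaded from byte offset 28 of the vtable.  */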

  func = fold (build (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable,
		      convert (nativecode_ptr_ptr_type_node, method_index)));

  if (TARGET_VTABLE_USES_DESCRIPTORS)
    func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
  else
    func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);

  return func;
}

static GTY(()) tree class_ident;
tree
build_invokeinterface (dtable, method)
     tree dtable, method;
{
  tree lookup_arg;
  tree interface;
  tree idx;
  tree meth;
  tree otable_index;
  int i;

  /* We expand invokeinterface here.  _Jv_LookupInterfaceMethod() will
     ensure that the selected method exists, is public and not
     abstract nor static.  */
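  /* A sketch of the call built below (IDX is either an otable entry
     when flag_indirect_dispatch is set, or the 1-based position of
     METHOD in the interface's method list):

	soft_lookupinterfacemethod_node (objectref->vtable->class,
					 <interface class>, IDX)

     The result is the code pointer that the caller then invokes.  */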
	    
  if (class_ident == NULL_TREE)
    {
      class_ident = get_identifier ("class");
    }

  dtable = build_java_indirect_ref (dtable_type, dtable, flag_check_references);
  dtable = build (COMPONENT_REF, class_ptr_type, dtable,
		  lookup_field (&dtable_type, class_ident));

  interface = DECL_CONTEXT (method);
  layout_class_methods (interface);
  
  if (flag_indirect_dispatch)
    {
      otable_index = build_int_2 (get_offset_table_index (method), 0);
      idx = build (ARRAY_REF, integer_type_node, otable_decl, otable_index);
    }
  else
    {
      i = 1;
      for (meth = TYPE_METHODS (interface); ; meth = TREE_CHAIN (meth), i++)
	{
	  if (meth == method)
            {
	      idx = build_int_2 (i, 0);
	      break;
	    }
	  if (meth == NULL_TREE)
	    abort ();
	}
    }

  lookup_arg = tree_cons (NULL_TREE, dtable,
                          tree_cons (NULL_TREE, build_class_ref (interface),
			             build_tree_list (NULL_TREE, idx)));
				     			  
  return build (CALL_EXPR, ptr_type_node, 
		build_address_of (soft_lookupinterfacemethod_node),
		lookup_arg, NULL_TREE);
}
  
/* Expand one of the invoke_* opcodes.
   OPCODE is the specific opcode.
   METHOD_REF_INDEX is an index into the constant pool.
   NARGS is the number of arguments, or -1 if not specified. */
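/* In outline (a summary of the dispatch logic below): invokestatic and
   invokespecial resolve to a known method reference; invokevirtual does
   too when the target is private or final (or its class is final), and
   otherwise goes through build_invokevirtual; invokeinterface goes
   through build_invokeinterface.  */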

static void
expand_invoke (opcode, method_ref_index, nargs)
     int opcode;
     int method_ref_index;
     int nargs ATTRIBUTE_UNUSED;
{
  tree method_signature = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
  tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool, method_ref_index);
  tree self_type = get_class_constant
    (current_jcf, COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool, method_ref_index));
  const char *const self_name
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree call, func, method, arg_list, method_type;
  tree check = NULL_TREE;

  if (! CLASS_LOADED_P (self_type))
    {
      load_class (self_type, 1);
      safe_layout_class (self_type);
      if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
	fatal_error ("failed to find class '%s'", self_name);
    }
  layout_class_methods (self_type);

  if (ID_INIT_P (method_name))
    method = lookup_java_constructor (self_type, method_signature);
  else
    method = lookup_java_method (self_type, method_name, method_signature);
  if (method == NULL_TREE)
    {
      error ("class '%s' has no method named '%s' matching signature '%s'",
	     self_name,
	     IDENTIFIER_POINTER (method_name),
	     IDENTIFIER_POINTER (method_signature));
    }
  /* invokestatic can't invoke a non-static or abstract method.  */
  else if (opcode == OPCODE_invokestatic)
    {
      if (!METHOD_STATIC (method))
	{
	  error ("invokestatic on non static method");
	  method = NULL_TREE;
	}
      else if (METHOD_ABSTRACT (method))
	{
	  error ("invokestatic on abstract method");
	  method = NULL_TREE;
	}
    }
  else
    {
      if (METHOD_STATIC (method))
	{
	  error ("invoke[non-static] on static method");
	  method = NULL_TREE;
	}
    }

  if (method == NULL_TREE)
    {
      method_type = get_type_from_signature (method_signature);
      pop_arguments (TYPE_ARG_TYPES (method_type));
      if (opcode != OPCODE_invokestatic) 
	pop_type (self_type);
      method_type = promote_type (TREE_TYPE (method_type));
      push_value (convert (method_type, integer_zero_node));
      return;
    }

  method_type = TREE_TYPE (method);
  arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
  flush_quick_stack ();

  func = NULL_TREE;
  if (opcode == OPCODE_invokestatic)
    func = build_known_method_ref (method, method_type, self_type,
				   method_signature, arg_list);
  else if (opcode == OPCODE_invokespecial
	   || (opcode == OPCODE_invokevirtual
	       && (METHOD_PRIVATE (method)
		   || METHOD_FINAL (method) 
		   || CLASS_FINAL (TYPE_NAME (self_type)))))
    {
      /* If the object for the method call is null, we throw an
	 exception.  We don't do this if the object is the current
	 method's `this'.  In other cases we just rely on an
	 optimization pass to eliminate redundant checks.  FIXME:
	 Unfortunately there doesn't seem to be a way to determine
	 what the current method is right now.
	 We do omit the check if we're calling <init>.  */
      /* We use a SAVE_EXPR here to make sure we only evaluate
	 the new `self' expression once.  */
      tree save_arg = save_expr (TREE_VALUE (arg_list));
      TREE_VALUE (arg_list) = save_arg;
      check = java_check_reference (save_arg, ! DECL_INIT_P (method));
      func = build_known_method_ref (method, method_type, self_type,
				     method_signature, arg_list);
    }
  else
    {
      tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface, 
					 arg_list);
      if (opcode == OPCODE_invokevirtual)
	func = build_invokevirtual (dtable, method);
      else
	func = build_invokeinterface (dtable, method);
    }
  func = build1 (NOP_EXPR, build_pointer_type (method_type), func);

  call = build (CALL_EXPR, TREE_TYPE (method_type), func, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call) = 1;
  call = check_for_builtin (method, call);

  if (check != NULL_TREE)
    {
      call = build (COMPOUND_EXPR, TREE_TYPE (call), check, call);
      TREE_SIDE_EFFECTS (call) = 1;
    }

  if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
    expand_expr_stmt (call);
  else
    {
      push_value (call);
      flush_quick_stack ();
    }
}

/* Create a stub which will be put into the vtable but which will call
   a JNI function.  */
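/* A sketch of the stub body assembled below, written in terms of the
   soft_*_node helpers used by the code (argument wrapping is omitted
   and RES exists only for non-void methods):

	env = soft_getjnienvnewframe_node (klass);
	if (meth == NULL)
	  meth = soft_lookupjnimethod_node (klass, name, signature);
	res = (*(jni_func_type) meth) (env, [klass,] args...);
	soft_jnipopsystemframe_node (env);
	return res;  */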

tree
build_jni_stub (method)
     tree method;
{
  tree jnifunc, call, args, body, lookup_arg, method_sig, arg_types;
  tree jni_func_type, tem;
  tree env_var, res_var = NULL_TREE, block;
  tree method_args, res_type;
  tree meth_var;

  tree klass = DECL_CONTEXT (method);
  int from_class = ! CLASS_FROM_SOURCE_P (klass);
  klass = build_class_ref (klass);

  if (! METHOD_NATIVE (method) || ! flag_jni)
    abort ();

  DECL_ARTIFICIAL (method) = 1;
  DECL_EXTERNAL (method) = 0;

  env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
  DECL_CONTEXT (env_var) = method;

  if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
    {
      res_var = build_decl (VAR_DECL, get_identifier ("res"),
			    TREE_TYPE (TREE_TYPE (method)));
      DECL_CONTEXT (res_var) = method;
      TREE_CHAIN (env_var) = res_var;
    }

  meth_var = build_decl (VAR_DECL, get_identifier ("meth"), ptr_type_node);
  TREE_STATIC (meth_var) = 1;
  TREE_PUBLIC (meth_var) = 0;
  DECL_EXTERNAL (meth_var) = 0;
  DECL_CONTEXT (meth_var) = method;
  DECL_ARTIFICIAL (meth_var) = 1;
  DECL_INITIAL (meth_var) = null_pointer_node;
  TREE_USED (meth_var) = 1;
  chainon (env_var, meth_var);
  layout_decl (meth_var, 0);
  make_decl_rtl (meth_var, NULL);
  rest_of_decl_compilation (meth_var, NULL, 0, 0);

  /* One strange way that the front ends are different is that they
     store arguments differently.  */
  if (from_class)
    method_args = DECL_ARGUMENTS (method);
  else
    method_args = BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (method));
  block = build_block (env_var, NULL_TREE, NULL_TREE,
		       method_args, NULL_TREE);
  TREE_SIDE_EFFECTS (block) = 1;
  /* When compiling from source we don't set the type of the block,
     because that will prevent patch_return from ever being run.  */
  if (from_class)
    TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));

  /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame.  */
  body = build (MODIFY_EXPR, ptr_type_node, env_var,
		build (CALL_EXPR, ptr_type_node,
		       build_address_of (soft_getjnienvnewframe_node),
		       build_tree_list (NULL_TREE, klass),
		       NULL_TREE));
  CAN_COMPLETE_NORMALLY (body) = 1;

  /* All the arguments to this method become arguments to the
     underlying JNI function.  If we had to wrap object arguments in a
     special way, we would do that here.  */
  args = NULL_TREE;
  for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
    args = tree_cons (NULL_TREE, tem, args);
  args = nreverse (args);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));

  /* For a static method the second argument is the class.  For a
     non-static method the second argument is `this'; that is already
     available in the argument list.  */
  if (METHOD_STATIC (method))
    {
      args = tree_cons (NULL_TREE, klass, args);
      arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
    }

  /* The JNIEnv structure is the first argument to the JNI function.  */
  args = tree_cons (NULL_TREE, env_var, args);
  arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);

  /* We call _Jv_LookupJNIMethod to find the actual underlying
     function pointer.  _Jv_LookupJNIMethod will throw the appropriate
     exception if this function is not found at runtime.  */
  method_sig = build_java_signature (TREE_TYPE (method));
  lookup_arg =
    build_tree_list (NULL_TREE,
		     build_utf8_ref (unmangle_classname
				     (IDENTIFIER_POINTER (method_sig),
				      IDENTIFIER_LENGTH (method_sig))));
  tem = DECL_NAME (method);
  lookup_arg
    = tree_cons (NULL_TREE, klass,
		 tree_cons (NULL_TREE, build_utf8_ref (tem), lookup_arg));

  jni_func_type
    = build_pointer_type (build_function_type (TREE_TYPE (TREE_TYPE (method)),
					       arg_types));

  jnifunc = build (COND_EXPR, ptr_type_node,
		   meth_var, meth_var,
		   build (MODIFY_EXPR, ptr_type_node,
			  meth_var,
			  build (CALL_EXPR, ptr_type_node,
				 build_address_of (soft_lookupjnimethod_node),
				 lookup_arg, NULL_TREE)));

  /* Now we make the actual JNI call via the resulting function
     pointer.    */
  call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (method)),
		build1 (NOP_EXPR, jni_func_type, jnifunc),
		args, NULL_TREE);

  /* If the JNI call returned a result, capture it here.  If we had to
     unwrap JNI object results, we would do that here.  */
  if (res_var != NULL_TREE)
    call = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
		  res_var, call);

  TREE_SIDE_EFFECTS (call) = 1;
  CAN_COMPLETE_NORMALLY (call) = 1;

  body = build (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Now free the environment we allocated.  */
  call = build (CALL_EXPR, ptr_type_node,
		build_address_of (soft_jnipopsystemframe_node),
		build_tree_list (NULL_TREE, env_var),
		NULL_TREE);
  TREE_SIDE_EFFECTS (call) = 1;
  CAN_COMPLETE_NORMALLY (call) = 1;
  body = build (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Finally, do the return.  When compiling from source we rely on
     patch_return to patch the return value -- because DECL_RESULT is
     not set at the time this function is called.  */
  if (from_class)
    {
      res_type = void_type_node;
      if (res_var != NULL_TREE)
	{
	  tree drt;
	  if (! DECL_RESULT (method))
	    abort ();
	  /* Make sure we copy the result variable to the actual
	     result.  We use the type of the DECL_RESULT because it
	     might be different from the return type of the function:
	     it might be promoted.  */
	  drt = TREE_TYPE (DECL_RESULT (method));
	  if (drt != TREE_TYPE (res_var))
	    res_var = build1 (CONVERT_EXPR, drt, res_var);
	  res_var = build (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
	  TREE_SIDE_EFFECTS (res_var) = 1;
	}
    }
  else
    {
      /* This is necessary to get patch_return to run.  */
      res_type = NULL_TREE;
    }
  body = build (COMPOUND_EXPR, void_type_node, body,
		build1 (RETURN_EXPR, res_type, res_var));
  TREE_SIDE_EFFECTS (body) = 1;

  BLOCK_EXPR_BODY (block) = body;
  return block;
}

/* Expand an operation to extract from or store into a field.
   IS_STATIC is 1 iff the field is static.
   IS_PUTTING is 1 for putting into a field;  0 for getting from the field.
   FIELD_REF_INDEX is an index into the constant pool.  */
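/* For example, `getfield' pops the objectref and pushes a COMPONENT_REF
   for the named field; the static variants wrap the reference with
   build_class_init first so the declaring class gets initialized, and
   the put variants pop the new value and expand an assignment.  */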

static void
expand_java_field_op (is_static, is_putting, field_ref_index)
     int is_static;
     int is_putting;
     int field_ref_index;
{
2349 2350 2351 2352
  tree self_type = 
      get_class_constant (current_jcf, 
			  COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool, 
						     field_ref_index));
  const char *self_name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
  tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, 
						  field_ref_index);
  tree field_type = get_type_from_signature (field_signature);
  tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
  tree field_ref;
  int is_error = 0;
  tree field_decl = lookup_field (&self_type, field_name);
  if (field_decl == error_mark_node)
    {
      is_error = 1;
    }
  else if (field_decl == NULL_TREE)
    {
      error ("missing field '%s' in '%s'",
	     IDENTIFIER_POINTER (field_name), self_name);
      is_error = 1;
    }
  else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
    {
      error ("mismatching signature for field '%s' in '%s'",
	     IDENTIFIER_POINTER (field_name), self_name);
      is_error = 1;
    }
  field_ref = is_static ? NULL_TREE : pop_value (self_type);
  if (is_error)
    {
      if (! is_putting)
	push_value (convert (field_type, integer_zero_node));
      flush_quick_stack ();
      return;
    }

  field_ref = build_field_ref (field_ref, self_type, field_name);
  if (is_static)
    field_ref = build_class_init (self_type, field_ref);
  if (is_putting)
    {
      flush_quick_stack ();
      if (FIELD_FINAL (field_decl))
	{
	  if (DECL_CONTEXT (field_decl) != current_class)
	    error_with_decl (field_decl,
		     "assignment to final field `%s' not in field's class");
	  else if (FIELD_STATIC (field_decl))
	    {
	      if (!DECL_CLINIT_P (current_function_decl))
		warning_with_decl (field_decl, 
             "assignment to final static field `%s' not in class initializer");
	    }
	  else
	    {
	      tree cfndecl_name = DECL_NAME (current_function_decl);
	      if (! DECL_CONSTRUCTOR_P (current_function_decl)
		  && !ID_FINIT_P (cfndecl_name))
		warning_with_decl (field_decl, "assignment to final field `%s' not in constructor");
	    }
	}
      expand_assignment (field_ref, new_value, 0, 0);
    }
  else
    push_value (field_ref);
}

void
load_type_state (label)
     tree label;
{
  int i;
  tree vec = LABEL_TYPE_STATE (label);
  int cur_length = TREE_VEC_LENGTH (vec);
  stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
  for (i = 0; i < cur_length; i++)
    type_map [i] = TREE_VEC_ELT (vec, i);
}

/* Do the expansion of a Java switch. With Gcc, switches are front-end
   dependent things, but they rely on gcc routines. This function is
   placed here because it uses things defined locally in parse.y. */

static tree
case_identity (t, v)
     tree t __attribute__ ((__unused__));
     tree v;
{
  return v;
}

/* Return the address of the vtable for an array of a given primitive
   type.  */
static tree
get_primitive_array_vtable (tree elt)
{
  tree r;
  if (elt == boolean_type_node)
    r = boolean_array_vtable;
  else if (elt == byte_type_node)
    r = byte_array_vtable;
  else if (elt == char_type_node)
    r = char_array_vtable;
  else if (elt == short_type_node)
    r = short_array_vtable;
  else if (elt == int_type_node)
    r = int_array_vtable;
  else if (elt == long_type_node)
    r = long_array_vtable;
  else if (elt == float_type_node)
    r = float_array_vtable;
  else if (elt == double_type_node)
    r = double_array_vtable;
  else
    abort ();
  return build_address_of (r);
}

struct rtx_def *
java_expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     int modifier; /* Actually an enum expand_modifier.  */
{
  tree current;

  switch (TREE_CODE (exp))
    {
    case NEW_ARRAY_INIT:
      {
	rtx tmp;
	tree array_type = TREE_TYPE (TREE_TYPE (exp));
	tree element_type = TYPE_ARRAY_ELEMENT (array_type);
	tree data_fld = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (array_type)));
	HOST_WIDE_INT ilength = java_array_type_length (array_type);
	tree length = build_int_2 (ilength, 0);
	tree init = TREE_OPERAND (exp, 0);
	tree array_decl;

	/* See if we can generate the array statically.  */
	if (TREE_CONSTANT (init) && TREE_STATIC (exp)
	    && JPRIMITIVE_TYPE_P (element_type))
	  {
	    tree temp, value, init_decl;
	    struct rtx_def *r;
	    START_RECORD_CONSTRUCTOR (temp, object_type_node);
	    PUSH_FIELD_VALUE (temp, "vtable",
			      get_primitive_array_vtable (element_type));
	    if (! flag_hash_synchronization)
	      PUSH_FIELD_VALUE (temp, "sync_info", null_pointer_node);
	    FINISH_RECORD_CONSTRUCTOR (temp);
	    START_RECORD_CONSTRUCTOR (value, array_type);
	    PUSH_SUPER_VALUE (value, temp);
	    PUSH_FIELD_VALUE (value, "length", length);
	    PUSH_FIELD_VALUE (value, "data", init);
	    FINISH_RECORD_CONSTRUCTOR (value);

	    init_decl = build_decl (VAR_DECL, generate_name (), array_type);
	    pushdecl_top_level (init_decl);
	    TREE_STATIC (init_decl) = 1;
	    DECL_INITIAL (init_decl) = value;
	    DECL_IGNORED_P (init_decl) = 1;
	    TREE_READONLY (init_decl) = 1;
	    /* Hash synchronization requires at least 64-bit alignment. */
	    if (flag_hash_synchronization && POINTER_SIZE < 64)
	      DECL_ALIGN (init_decl) = 64;
	    rest_of_decl_compilation (init_decl, NULL, 1, 0);
	    TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (init_decl)) = 1;
	    init = build1 (ADDR_EXPR, TREE_TYPE (exp), init_decl);
	    r = expand_expr (init, target, tmode, modifier);
	    return r;
	  }

	array_decl = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
	expand_decl (array_decl);
	tmp = expand_assignment (array_decl,
				 build_new_array (element_type, length),
				 1, 0);
	if (TREE_CONSTANT (init)
	    && ilength >= 10 && JPRIMITIVE_TYPE_P (element_type))
	  {
	    tree init_decl;
	    init_decl = build_decl (VAR_DECL, generate_name (),
				    TREE_TYPE (init));
	    pushdecl_top_level (init_decl);
	    TREE_STATIC (init_decl) = 1;
	    DECL_INITIAL (init_decl) = init;
	    DECL_IGNORED_P (init_decl) = 1;
	    TREE_READONLY (init_decl) = 1;
	    rest_of_decl_compilation (init_decl, NULL, 1, 0);
	    TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (init_decl)) = 1;
	    init = init_decl;
	  }
	expand_assignment (build (COMPONENT_REF, TREE_TYPE (data_fld),
				  build_java_indirect_ref (array_type, 
					  array_decl, flag_check_references), 
				  data_fld), init, 0, 0);
	return tmp;
      }
    case BLOCK:
      if (BLOCK_EXPR_BODY (exp))
	{
	  tree local;
	  rtx last;
	  tree body = BLOCK_EXPR_BODY (exp);
	  /* Set to 1 or more when we find a static class
             initialization flag. */
	  int found_class_initialization_flag = 0;

	  pushlevel (2);	/* 2 and above */
	  expand_start_bindings (0);
	  local = BLOCK_EXPR_DECLS (exp);
	  while (local)
	    {
	      tree next = TREE_CHAIN (local);
	      found_class_initialization_flag +=
		LOCAL_CLASS_INITIALIZATION_FLAG_P (local);
	      layout_decl (local, 0);
	      expand_decl (pushdecl (local));
	      local = next;
	    }

	  /* Emit initialization code for test flags if we saw one. */
	  if (! always_initialize_class_p 
	      && current_function_decl
	      && found_class_initialization_flag)
	    htab_traverse 
	      (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl),
	       emit_init_test_initialization, NULL);

	  /* Avoid deep recursion for long block.  */
	  while (TREE_CODE (body) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (body, 0), const0_rtx, VOIDmode, 0);
	      emit_queue ();
	      body = TREE_OPERAND (body, 1);
	    }
  	  last = expand_expr (body, NULL_RTX, VOIDmode, 0);
	  emit_queue ();
	  expand_end_bindings (getdecls (), 1, 0);
	  poplevel (1, 1, 0);
	  return last;
	}
      return const0_rtx;

    case CASE_EXPR:
      {
	tree duplicate;
	if (pushcase (TREE_OPERAND (exp, 0), case_identity,
		      build_decl (LABEL_DECL, NULL_TREE, NULL_TREE), 
		      &duplicate) == 2)
	  {
	    EXPR_WFL_LINECOL (wfl_operator) = EXPR_WFL_LINECOL (exp);
	    parse_error_context
	      (wfl_operator, "Duplicate case label: `%s'",
	       print_int_node (TREE_OPERAND (exp, 0)));
	  }
	return const0_rtx;
      }

    case DEFAULT_EXPR:
      pushcase (NULL_TREE, 0, 
		build_decl (LABEL_DECL, NULL_TREE, NULL_TREE), NULL);
      return const0_rtx;

    case SWITCH_EXPR:
      expand_start_case (0, TREE_OPERAND (exp, 0), int_type_node, "switch");
      expand_expr_stmt (TREE_OPERAND (exp, 1));
      expand_end_case (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case TRY_EXPR:
      /* We expand a try[-catch] block */

      /* Expand the try block */
      expand_eh_region_start ();
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_start_all_catch ();

      /* Expand all catch clauses (EH handlers) */
      for (current = TREE_OPERAND (exp, 1); current; 
	   current = TREE_CHAIN (current))
	{
	  tree catch = TREE_OPERAND (current, 0);
	  tree decl = BLOCK_EXPR_DECLS (catch);
	  tree type = (decl ? TREE_TYPE (TREE_TYPE (decl)) : NULL_TREE);

	  expand_start_catch (type);
	  expand_expr_stmt (TREE_OPERAND (current, 0));
	  expand_end_catch ();
	}
      expand_end_all_catch ();
      return const0_rtx;

    case JAVA_EXC_OBJ_EXPR:
      return expand_expr (build_exception_object_ref (TREE_TYPE (exp)),
			  target, tmode, modifier);

    case LABEL_EXPR:
      /* Used only by expanded inline functions.  */
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    default:
      internal_error ("can't expand %s", tree_code_name [TREE_CODE (exp)]);
    }
}

/* Go over METHOD's bytecode and note instruction starts in
   instruction_bits[].  */
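/* For example, a `goto' whose signed 16-bit offset is +10 and whose
   opcode starts at PC 20 makes note_label mark PC 30 as a jump target
   in instruction_bits[], so expand_byte_code will emit a label there.  */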

void
note_instructions (jcf, method)
     JCF *jcf;
     tree method;
{
  int PC; 
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);

  int saw_index;
  jint INT_temp;

#undef RET /* Defined by config/i386/i386.h */
#undef AND /* Causes problems with opcodes for iand and land. */
#undef PTR
#define BCODE byte_ops
#define BYTE_type_node byte_type_node
#define SHORT_type_node short_type_node
#define INT_type_node int_type_node
#define LONG_type_node long_type_node
#define CHAR_type_node char_type_node
#define PTR_type_node ptr_type_node
#define FLOAT_type_node float_type_node
#define DOUBLE_type_node double_type_node
#define VOID_type_node void_type_node
#define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
#define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)

2693
#define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
Anthony Green committed
2694

2695 2696
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;
Mark Mitchell committed
2697
  instruction_bits = xrealloc (instruction_bits, length + 1);
2698
  memset (instruction_bits, 0, length + 1);
Anthony Green committed
2699

2700
  /* This pass figures out which PC can be the targets of jumps. */
Anthony Green committed
2701 2702 2703 2704 2705 2706 2707 2708 2709 2710 2711 2712 2713 2714 2715 2716 2717 2718 2719 2720 2721 2722 2723 2724 2725 2726 2727 2728 2729 2730 2731 2732 2733 2734 2735 2736 2737 2738 2739 2740 2741 2742 2743 2744 2745 2746 2747 2748 2749 2750 2751 2752 2753 2754 2755 2756 2757 2758 2759 2760 2761 2762 2763 2764 2765 2766 2767 2768
  for (PC = 0; PC < length;)
    {
      int oldpc = PC; /* PC at instruction start. */
      instruction_bits [PC] |=  BCODE_INSTRUCTION_START;
      switch (byte_ops[PC++])
	{
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
        case OPCODE: \
	  PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
	  break;

#define NOTE_LABEL(PC) note_label(oldpc, PC)

#define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
#define PRE_SPECIAL_IINC(OPERAND_TYPE) \
  ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
#define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
#define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
#define PRE_SPECIAL_THROW(IGNORE) /* nothing */
#define PRE_SPECIAL_BREAK(IGNORE) /* nothing */

/* two forms of wide instructions */
#define PRE_SPECIAL_WIDE(IGNORE) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    if (modified_opcode == OPCODE_iinc)	\
      { \
	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
	(void) IMMEDIATE_s2;	/* constbyte1 and constbyte2 */ \
      } \
    else \
      { \
	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
      } \
  }

#define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */

#define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
	  PRE_ARRAY_##SUBOP(OPERAND_TYPE)
#define PRE_ARRAY_LOAD(TYPE) /* nothing */
#define PRE_ARRAY_STORE(TYPE) /* nothing */
#define PRE_ARRAY_LENGTH(TYPE) /* nothing */
#define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
#define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
#define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
#define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)

#define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
#define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  NOTE_LABEL (PC); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);

#define PRE_RET(OPERAND_TYPE, OPERAND_VALUE)  (void)(OPERAND_VALUE)

#define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH

#define PRE_LOOKUP_SWITCH						\
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4;	\
    NOTE_LABEL (default_offset+oldpc);					\
    if (npairs >= 0)							\
      while (--npairs >= 0) {						\
       jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4;			\
       jint offset = IMMEDIATE_s4;					\
       NOTE_LABEL (offset+oldpc); }					\
  }

#define PRE_TABLE_SWITCH				\
  { jint default_offset = IMMEDIATE_s4;			\
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4;	\
    NOTE_LABEL (default_offset+oldpc);			\
    if (low <= high)					\
     while (low++ <= high) {				\
       jint offset = IMMEDIATE_s4;			\
       NOTE_LABEL (offset+oldpc); }			\
  }

#define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  (void)(IMMEDIATE_u2); \
  PC += 2 * IS_INTERFACE /* for invokeinterface */;

#include "javaop.def"
#undef JAVAOP
	}
    } /* for */
}

void
expand_byte_code (jcf, method)
     JCF *jcf;
     tree method;
{
  int PC;
  int i;
  const unsigned char *linenumber_pointer;
  int dead_code_index = -1;
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);

  stack_pointer = 0;
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;

  /* We make an initial pass of the line number table, to note
     which instructions have associated line number entries. */
  linenumber_pointer = linenumber_table;
  for (i = 0; i < linenumber_count; i++)
    {
      int pc = GET_u2 (linenumber_pointer);
      linenumber_pointer += 4;
      if (pc >= length)
	warning ("invalid PC in line number table");
      else
	{
	  if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
	    instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
	  instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
	}
    }  

  if (! verify_jvm_instructions (jcf, byte_ops, length))
    return;

  /* Translate bytecodes to rtl instructions. */
  linenumber_pointer = linenumber_table;
  for (PC = 0; PC < length;)
    {
      if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
	{
	  tree label = lookup_label (PC);
          flush_quick_stack ();
	  if ((instruction_bits [PC] & BCODE_TARGET) != 0)
	    expand_label (label);
	  if (LABEL_VERIFIED (label) || PC == 0)
	    load_type_state (label);
	}

      if (! (instruction_bits [PC] & BCODE_VERIFIED))
	{
	  if (dead_code_index == -1)
	    {
	      /* This is the start of a region of unreachable bytecodes.
                 They still need to be processed in order for EH ranges
                 to get handled correctly.  However, we can simply
                 replace these bytecodes with nops.  */
	      dead_code_index = PC;
            }
          
          /* Turn this bytecode into a nop.  */
          byte_ops[PC] = 0x0;
        }
       else
        {
	  if (dead_code_index != -1)
	    {
              /* We've just reached the end of a region of dead code.  */
              warning ("unreachable bytecode from %d to before %d",
                       dead_code_index, PC);
              dead_code_index = -1;
            }
	}

      /* Handle possible line number entry for this PC.

	 This code handles out-of-order and multiple linenumbers per PC,
	 but is optimized for the case of line numbers increasing
	 monotonically with PC. */
      if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
	{
	  if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
	      || GET_u2 (linenumber_pointer) != PC)
	    linenumber_pointer = linenumber_table;
	  while (linenumber_pointer < linenumber_table + linenumber_count * 4)
	    {
	      int pc = GET_u2 (linenumber_pointer);
	      linenumber_pointer += 4;
	      if (pc == PC)
		{
		  lineno = GET_u2 (linenumber_pointer - 2);
		  emit_line_note (input_filename, lineno);
		  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
		    break;
		}
	    }
	}
      maybe_pushlevels (PC);
      PC = process_jvm_instruction (PC, byte_ops, length);
      maybe_poplevels (PC);
    } /* for */
  
  if (dead_code_index != -1)
    {
      /* We've just reached the end of a region of dead code.  */
      warning ("unreachable bytecode from %d to the end of the method", 
              dead_code_index);
    }
}

static void
java_push_constant_from_pool (jcf, index)
     JCF *jcf;
     int index;
{
  tree c;
  if (JPOOL_TAG (jcf, index) == CONSTANT_String)
    {
      tree name;
      name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
      index = alloc_name_constant (CONSTANT_String, name);
      c = build_ref_from_constant_pool (index);
      TREE_TYPE (c) = promote_type (string_type_node);
    }
  else
    c = get_constant (jcf, index);
  push_value (c);
} 

int
process_jvm_instruction (PC, byte_ops, length)
     int PC;
     const unsigned char* byte_ops;
     long length ATTRIBUTE_UNUSED;
{ 
  const char *opname; /* Temporary ??? */
  int oldpc = PC; /* PC at instruction start. */

  /* If the instruction is at the beginning of an exception handler,
     replace the top of the stack with the thrown object reference.  */
  if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
    {
      tree type = pop_type (ptr_type_node);
      push_value (build (JAVA_EXC_OBJ_EXPR, type));
    }

  switch (byte_ops[PC++])
    {
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
    case OPCODE: \
      opname = #OPNAME; \
      OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
      break;

#define RET(OPERAND_TYPE, OPERAND_VALUE) 				\
  {									\
    int saw_index = 0;							\
    int index     = OPERAND_VALUE;					\
    build_java_ret (find_local_variable (index, ptr_type_node, oldpc));	\
  }

#define JSR(OPERAND_TYPE, OPERAND_VALUE) \
  {						    \
    /* OPERAND_VALUE may have side-effects on PC */ \
    int opvalue = OPERAND_VALUE;		    \
    build_java_jsr (oldpc + opvalue, PC);	    \
  }

/* Push a constant onto the stack. */
#define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
  { int saw_index = 0;  int ival = (OPERAND_VALUE); \
    if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
    else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }

/* internal macro added for use by the WIDE case */
#define LOAD_INTERNAL(OPTYPE, OPVALUE) \
  expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);

/* Push local variable onto the opcode stack. */
#define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* have to do this since OPERAND_VALUE may have side-effects */ \
    int opvalue = OPERAND_VALUE; \
    LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
  }

#define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
  expand_java_return (OPERAND_TYPE##_type_node)

#define REM_EXPR TRUNC_MOD_EXPR
#define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
  expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)

#define FIELD(IS_STATIC, IS_PUT) \
  expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)

#define TEST(OPERAND_TYPE, CONDITION) \
  expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)

#define COND(OPERAND_TYPE, CONDITION) \
  expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)

#define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  BRANCH_##OPERAND_TYPE (OPERAND_VALUE)

#define BRANCH_GOTO(OPERAND_VALUE) \
  expand_java_goto (oldpc + OPERAND_VALUE)

#define BRANCH_CALL(OPERAND_VALUE) \
  expand_java_call (oldpc + OPERAND_VALUE, oldpc)

#if 0
#define BRANCH_RETURN(OPERAND_VALUE) \
  { \
    tree type = OPERAND_TYPE##_type_node; \
    tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
    expand_java_ret (value); \
  }
#endif

#define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
	  fprintf (stderr, "%3d: %s ", oldpc, opname); \
	  fprintf (stderr, "(not implemented)\n")
#define NOT_IMPL1(OPERAND_VALUE) \
	  fprintf (stderr, "%3d: %s ", oldpc, opname); \
	  fprintf (stderr, "(not implemented)\n")

#define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)

#define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)

#define STACK_POP(COUNT) java_stack_pop (COUNT)

#define STACK_SWAP(COUNT) java_stack_swap()

#define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
#define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
#define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)

#define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH

#define LOOKUP_SWITCH \
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
    tree selector = pop_value (INT_type_node); \
    tree duplicate, label; \
    tree type = TREE_TYPE (selector); \
    flush_quick_stack (); \
    expand_start_case (0, selector, type, "switch statement");\
    while (--npairs >= 0) \
      { \
	jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
	tree value = build_int_2 (match, match < 0 ? -1 : 0); \
	TREE_TYPE (value) = type; \
	label =  build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \
	pushcase (value, convert, label, &duplicate); \
	expand_java_goto (oldpc + offset); \
      } \
    label =  build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \
    pushcase (NULL_TREE, 0, label, &duplicate); \
    expand_java_goto (oldpc + default_offset); \
    expand_end_case (selector); \
  }

#define TABLE_SWITCH \
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    tree selector = pop_value (INT_type_node); \
    tree duplicate, label; \
    tree type = TREE_TYPE (selector); \
    flush_quick_stack (); \
    expand_start_case (0, selector, type, "switch statement");\
    for (; low <= high; low++) \
      { \
        jint offset = IMMEDIATE_s4; \
        tree value = build_int_2 (low, low < 0 ? -1 : 0); \
        TREE_TYPE (value) = type; \
        label =  build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \
        pushcase (value, convert, label, &duplicate); \
        expand_java_goto (oldpc + offset); \
      } \
    label =  build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \
    pushcase (NULL_TREE, 0, label, &duplicate); \
    expand_java_goto (oldpc + default_offset); \
    expand_end_case (selector); \
  }
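/* tableswitch is laid out as

     default (s4)  low (s4)  high (s4)  then high - low + 1 branch
     offsets (s4 each)

   so every value in [low, high] gets its own case label and a jump to
   oldpc + offset.  */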

#define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  { int opcode = byte_ops[PC-1]; \
    int method_ref_index = IMMEDIATE_u2; \
    int nargs; \
    if (IS_INTERFACE) { nargs = IMMEDIATE_u1;  (void) IMMEDIATE_u1; } \
    else nargs = -1; \
    expand_invoke (opcode, method_ref_index, nargs); \
  }
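/* invokeinterface carries an extra unsigned count byte and a mandatory
   zero byte after the constant-pool index, hence the two IMMEDIATE_u1
   reads in the interface case.  For the other invoke opcodes nargs is
   passed as -1, and expand_invoke is expected to recover the argument
   count from the method descriptor instead.  */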

/* Handle new, checkcast, instanceof */
#define OBJECT(TYPE, OP) \
  expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))

#define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)

#define ARRAY_LOAD(OPERAND_TYPE) 			\
  {							\
    expand_java_arrayload( OPERAND_TYPE##_type_node );	\
  }

#define ARRAY_STORE(OPERAND_TYPE)			\
  {							\
    expand_java_arraystore( OPERAND_TYPE##_type_node );	\
  }

#define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
#define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
#define ARRAY_NEW_PTR()							\
    push_value (build_anewarray (get_class_constant (current_jcf,	\
						     IMMEDIATE_u2),	\
				 pop_value (int_type_node)));
#define ARRAY_NEW_NUM()				\
  {						\
    int atype = IMMEDIATE_u1;			\
    push_value (build_newarray (atype, pop_value (int_type_node)));\
  }
#define ARRAY_NEW_MULTI()					\
  {								\
    tree class = get_class_constant (current_jcf, IMMEDIATE_u2 );	\
    int  ndims = IMMEDIATE_u1;					\
    expand_java_multianewarray( class, ndims );			\
  }
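/* multianewarray takes a u2 constant-pool index naming the array class
   and a u1 giving the number of dimensions whose sizes are on the
   stack, matching the two reads above.  */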

#define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
  push_value (fold (build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
			    pop_value (OPERAND_TYPE##_type_node))));

#define CONVERT2(FROM_TYPE, TO_TYPE)					 \
  {									 \
    push_value (build1 (NOP_EXPR, int_type_node,			 \
			(convert (TO_TYPE##_type_node,			 \
				  pop_value (FROM_TYPE##_type_node))))); \
  }

#define CONVERT(FROM_TYPE, TO_TYPE)				\
  {								\
    push_value (convert (TO_TYPE##_type_node,	                \
			 pop_value (FROM_TYPE##_type_node)));	\
  }

/* Internal macro, added so that the WIDE case can share it.
   Added TREE_TYPE (decl) assignment, apbianco.  */
#define STORE_INTERNAL(OPTYPE, OPVALUE)			\
  {							\
    tree decl, value;					\
    int var = OPVALUE;					\
    tree type = OPTYPE;					\
    value = pop_value (type);				\
    type = TREE_TYPE (value);				\
    decl = find_local_variable (var, type, oldpc);	\
    set_local_type (var, type );			\
    expand_assignment (decl, value, 0, 0);		\
  }

#define STORE(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* have to do this since OPERAND_VALUE may have side-effects */ \
    int opvalue = OPERAND_VALUE; \
    STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
  }

#define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  SPECIAL_##INSTRUCTION(OPERAND_TYPE)

#define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
#define SPECIAL_EXIT(IGNORED)  MONITOR_OPERATION (soft_monitorexit_node)

#define MONITOR_OPERATION(call)			\
  {						\
    tree o = pop_value (ptr_type_node);		\
    tree c;					\
    flush_quick_stack ();			\
    c = build_java_monitor (call, o);		\
    TREE_SIDE_EFFECTS (c) = 1;			\
    expand_expr_stmt (c);			\
  }

#define SPECIAL_IINC(IGNORED) \
  { \
    unsigned int local_var_index = IMMEDIATE_u1; \
    int ival = IMMEDIATE_s1; \
    expand_iinc(local_var_index, ival, oldpc); \
  }

#define SPECIAL_WIDE(IGNORED) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    unsigned int local_var_index = IMMEDIATE_u2; \
    switch (modified_opcode) \
      { \
      case OPCODE_iinc: \
	{ \
	  int ival = IMMEDIATE_s2; \
	  expand_iinc (local_var_index, ival, oldpc); \
	  break; \
	} \
      case OPCODE_iload: \
      case OPCODE_lload: \
      case OPCODE_fload: \
      case OPCODE_dload: \
      case OPCODE_aload: \
	{ \
	  /* duplicate code from LOAD macro */ \
	  LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      case OPCODE_istore: \
      case OPCODE_lstore: \
      case OPCODE_fstore: \
      case OPCODE_dstore: \
      case OPCODE_astore: \
	{ \
	  STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      default: \
        error ("unrecogized wide sub-instruction"); \
      } \
  }
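/* The wide prefix widens the local-variable index of the following
   load or store opcode from one byte to two; for iinc it also widens
   the increment constant to a signed 16-bit value, as read above.  */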

#define SPECIAL_THROW(IGNORED) \
  build_java_athrow (pop_value (throwable_type_node))

#define SPECIAL_BREAK NOT_IMPL1
#define IMPL          NOT_IMPL

#include "javaop.def"
#undef JAVAOP
   default:
    fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
  }
  return PC;
}

/* Return the opcode at PC in the code section pointed to by
   CODE_OFFSET.  */

static unsigned char
peek_opcode_at_pc (jcf, code_offset, pc)
    JCF *jcf;
    int code_offset, pc;
{
  unsigned char opcode;
  long absolute_offset = (long)JCF_TELL (jcf);

  JCF_SEEK (jcf, code_offset);
  opcode = jcf->read_ptr [pc];
  JCF_SEEK (jcf, absolute_offset);
  return opcode;
}
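/* A typical use, given the file offset of a method's bytecode, would
   look like

     unsigned char op = peek_opcode_at_pc (jcf, code_offset, pc);

   The current read position is saved and restored around the seek, so
   a caller's sequential reads from JCF are not disturbed.  */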

/* Some bytecode compilers emit accurate LocalVariableTable
   attributes. Here's an example:
   
     PC   <t>store_<n>
     PC+1 ...
     
     Attribute "LocalVariableTable"
     slot #<n>: ... (PC: PC+1 length: L)
   
   This is accurate because the local in slot <n> really exists after
   the opcode at PC is executed, hence from PC+1 to PC+1+L.

   This procedure recognizes this situation and extends the live range
   of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
   length of the store instruction).

   This function is used by `give_name_to_locals' so that a local's
   DECL features a DECL_LOCAL_START_PC such that the first related
   store operation will use DECL as a destination, not an unrelated
   temporary created for the occasion.

   This function uses a global (instruction_bits) that
   `note_instructions' should have allocated and filled properly.  */

int
maybe_adjust_start_pc (jcf, code_offset, start_pc, slot)
     struct JCF *jcf;
     int code_offset, start_pc, slot;
{
  int first, index, opcode;
  int pc, insn_pc;
  int wide_found = 0;

  if (!start_pc)
    return start_pc;

  first = index = -1;

  /* Find the last instruction before START_PC and remember it.  */
  for (pc = start_pc-1; pc; pc--) 
    if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
      break;
  insn_pc = pc;

  /* Retrieve the instruction, handle `wide'. */  
  opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
  if (opcode == OPCODE_wide)
    {
      wide_found = 1;
      opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
    }

  switch (opcode)
    {
    case OPCODE_astore_0:
    case OPCODE_astore_1:
    case OPCODE_astore_2:
    case OPCODE_astore_3:
      first = OPCODE_astore_0;
      break;

    case OPCODE_istore_0:
    case OPCODE_istore_1:
    case OPCODE_istore_2:
    case OPCODE_istore_3:
      first = OPCODE_istore_0;
      break;
      
    case OPCODE_lstore_0:
    case OPCODE_lstore_1:
    case OPCODE_lstore_2:
    case OPCODE_lstore_3:
      first = OPCODE_lstore_0;
      break;

    case OPCODE_fstore_0:
    case OPCODE_fstore_1:
    case OPCODE_fstore_2:
    case OPCODE_fstore_3:
      first = OPCODE_fstore_0;
      break;

    case OPCODE_dstore_0:
    case OPCODE_dstore_1:
    case OPCODE_dstore_2:
    case OPCODE_dstore_3:
      first = OPCODE_dstore_0;
      break;

    case OPCODE_astore:
    case OPCODE_istore:
    case OPCODE_lstore:
    case OPCODE_fstore:
    case OPCODE_dstore:
      index = peek_opcode_at_pc (jcf, code_offset, pc);
      if (wide_found)
	{
	  int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
	  index = (other << 8) + index;
	}
      break;
    }

  /* Now we decide: first >0 means we have a <t>store_<n>, index >0
     means we have a <t>store. */
  if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
    start_pc = insn_pc;

  return start_pc;
}
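/* Hypothetical example: if the instruction at PC is astore_2 and the
   LocalVariableTable entry for slot 2 starts at PC+1, the returned
   start pc is moved back to PC, so that the store itself already
   targets the named DECL rather than a temporary.  */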

/* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
   order, as specified by the Java Language Specification.

   The problem is that while expand_expr will evaluate its sub-operands in
   left-to-right order, for variables it will just return an rtx (i.e.
   an lvalue) for the variable (rather than an rvalue).  So it is possible
   that a later sub-operand will change the register, and when the
   actual operation is done, it will use the new value, when it should
   have used the original value.

   We fix this by using save_expr.  This forces the sub-operand to be
   copied into a fresh virtual register, so that a later change to the
   variable cannot affect the value that has already been saved.

   For method invocations, we modify the arguments so that a
   left-to-right evaluation order is enforced.  The saved expressions
   are then reused, in CALL_EXPR order, when the call is expanded.
*/

tree
force_evaluation_order (node)
     tree  node;
{
  if (flag_syntax_only)
    return node;
  if (TREE_CODE_CLASS (TREE_CODE (node)) == '2')
    {
      if (TREE_SIDE_EFFECTS (TREE_OPERAND (node, 1)))
	TREE_OPERAND (node, 0) = save_expr (TREE_OPERAND (node, 0));
    }
  else if (TREE_CODE (node) == CALL_EXPR
           || TREE_CODE (node) == NEW_CLASS_EXPR
           || (TREE_CODE (node) == COMPOUND_EXPR
               && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
               && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR)) 
    {
      tree arg, cmp;

      if (!TREE_OPERAND (node, 1))
	return node;

      arg = node;
      
      /* Position arg properly, accounting for constructors wrapped in a COMPOUND_EXPR. */
      if (TREE_CODE (node) == COMPOUND_EXPR)
        arg = TREE_OPERAND (node, 0);
      
      arg = TREE_OPERAND (arg, 1);
      
      /* Not having a list of arguments here is an error. */
      if (TREE_CODE (arg) != TREE_LIST)
        abort ();

      /* This reverses the evaluation order, which is the desired effect. */
      for (cmp = NULL_TREE; arg; arg = TREE_CHAIN (arg))
	{
	  tree saved = save_expr (force_evaluation_order (TREE_VALUE (arg)));
	  cmp = (cmp == NULL_TREE ? saved :
		 build (COMPOUND_EXPR, void_type_node, cmp, saved));
	  TREE_VALUE (arg) = saved;
	}
      
      if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
	TREE_SIDE_EFFECTS (cmp) = 1;

      if (cmp)
	{
	  cmp = save_expr (build (COMPOUND_EXPR, TREE_TYPE (node), cmp, node));
	  CAN_COMPLETE_NORMALLY (cmp) = CAN_COMPLETE_NORMALLY (node);
	  TREE_SIDE_EFFECTS (cmp) = 1;
	  node = cmp;
	}
    }
  return node;
}
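/* Rough illustration (not actual dumped trees): for a call such as
   f (a (), b ()), each argument is wrapped in a SAVE_EXPR, a
   COMPOUND_EXPR chain evaluating those SAVE_EXPRs is built, and the
   result is wrapped around the original CALL_EXPR, which then only
   sees the already-evaluated SAVE_EXPRs as its arguments.  */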

/* Called for every element in DECL_FUNCTION_INIT_TEST_TABLE of a
   method in order to emit initialization code for each test flag.  */

static int
emit_init_test_initialization (entry, x)
     void * * entry;
     void * x ATTRIBUTE_UNUSED;
{
  struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
  tree klass = build_class_ref (ite->key);
  tree rhs;

  /* If the DECL_INITIAL of the test flag is set to true, it
     means that the class is already initialized by the time
     it is used. */
  if (DECL_INITIAL (ite->value) == boolean_true_node)
    rhs = boolean_true_node;
  /* Otherwise, we initialize the class init check variable by looking
     at the `state' field of the class to see if it is already
     initialized.  This makes things a bit faster if the class is
     already initialized, which should be the common case.  */
  else
    rhs = build (GE_EXPR, boolean_type_node,
		 build (COMPONENT_REF, byte_type_node,
			build1 (INDIRECT_REF, class_type_node, klass),
			lookup_field (&class_type_node,
				      get_identifier ("state"))),
		 build_int_2 (JV_STATE_DONE, 0));

  expand_expr_stmt (build (MODIFY_EXPR, boolean_type_node, 
			   ite->value, rhs));
  return true;
}
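/* The statement emitted for an uninitialized flag is roughly
   equivalent to

     init_test_flag = (klass->state >= JV_STATE_DONE);

   where init_test_flag is the boolean variable recorded in the
   hash-table entry.  */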

#include "gt-java-expr.h"