1/* Process expressions for the GNU compiler for the Java(TM) language. 2 Copyright (C) 1996-2015 Free Software Foundation, Inc. 3 4This file is part of GCC. 5 6GCC is free software; you can redistribute it and/or modify 7it under the terms of the GNU General Public License as published by 8the Free Software Foundation; either version 3, or (at your option) 9any later version. 10 11GCC is distributed in the hope that it will be useful, 12but WITHOUT ANY WARRANTY; without even the implied warranty of 13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14GNU General Public License for more details. 15 16You should have received a copy of the GNU General Public License 17along with GCC; see the file COPYING3. If not see 18<http://www.gnu.org/licenses/>. 19 20Java and all Java-based marks are trademarks or registered trademarks 21of Sun Microsystems, Inc. in the United States and other countries. 22The Free Software Foundation is independent of Sun Microsystems, Inc. */ 23 24/* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */ 25 26#include "config.h" 27#include "system.h" 28#include "coretypes.h" 29#include "tm.h" /* For INT_TYPE_SIZE, 30 TARGET_VTABLE_USES_DESCRIPTORS, 31 BITS_PER_UNIT, 32 MODIFY_JNI_METHOD_CALL and 33 PARM_BOUNDARY. 
*/ 34 35#include "hash-set.h" 36#include "machmode.h" 37#include "vec.h" 38#include "double-int.h" 39#include "input.h" 40#include "alias.h" 41#include "symtab.h" 42#include "wide-int.h" 43#include "inchash.h" 44#include "real.h" 45#include "tree.h" 46#include "fold-const.h" 47#include "stringpool.h" 48#include "stor-layout.h" 49#include "flags.h" 50#include "java-tree.h" 51#include "javaop.h" 52#include "java-opcodes.h" 53#include "jcf.h" 54#include "java-except.h" 55#include "parse.h" 56#include "diagnostic-core.h" 57#include "ggc.h" 58#include "tree-iterator.h" 59#include "target.h" 60 61static void flush_quick_stack (void); 62static void push_value (tree); 63static tree pop_value (tree); 64static void java_stack_swap (void); 65static void java_stack_dup (int, int); 66static void build_java_athrow (tree); 67static void build_java_jsr (int, int); 68static void build_java_ret (tree); 69static void expand_java_multianewarray (tree, int); 70static void expand_java_arraystore (tree); 71static void expand_java_arrayload (tree); 72static void expand_java_array_length (void); 73static tree build_java_monitor (tree, tree); 74static void expand_java_pushc (int, tree); 75static void expand_java_return (tree); 76static void expand_load_internal (int, tree, int); 77static void expand_java_NEW (tree); 78static void expand_java_INSTANCEOF (tree); 79static void expand_java_CHECKCAST (tree); 80static void expand_iinc (unsigned int, int, int); 81static void expand_java_binop (tree, enum tree_code); 82static void note_label (int, int); 83static void expand_compare (enum tree_code, tree, tree, int); 84static void expand_test (enum tree_code, tree, int); 85static void expand_cond (enum tree_code, tree, int); 86static void expand_java_goto (int); 87static tree expand_java_switch (tree, int); 88static void expand_java_add_case (tree, int, int); 89static vec<tree, va_gc> *pop_arguments (tree); 90static void expand_invoke (int, int, int); 91static void expand_java_field_op (int, int, 
int); 92static void java_push_constant_from_pool (struct JCF *, int); 93static void java_stack_pop (int); 94static tree build_java_throw_out_of_bounds_exception (tree); 95static tree build_java_check_indexed_type (tree, tree); 96static unsigned char peek_opcode_at_pc (struct JCF *, int, int); 97static void promote_arguments (void); 98static void cache_cpool_data_ref (void); 99 100static GTY(()) tree operand_type[59]; 101 102static GTY(()) tree methods_ident; 103static GTY(()) tree ncode_ident; 104tree dtable_ident = NULL_TREE; 105 106/* Set to nonzero value in order to emit class initialization code 107 before static field references. */ 108int always_initialize_class_p = 0; 109 110/* We store the stack state in two places: 111 Within a basic block, we use the quick_stack, which is a vec of expression 112 nodes. 113 This is the top part of the stack; below that we use find_stack_slot. 114 At the end of a basic block, the quick_stack must be flushed 115 to the stack slot array (as handled by find_stack_slot). 116 Using quick_stack generates better code (especially when 117 compiled without optimization), because we do not have to 118 explicitly store and load trees to temporary variables. 119 120 If a variable is on the quick stack, it means the value of variable 121 when the quick stack was last flushed. Conceptually, flush_quick_stack 122 saves all the quick_stack elements in parallel. However, that is 123 complicated, so it actually saves them (i.e. copies each stack value 124 to is home virtual register) from low indexes. This allows a quick_stack 125 element at index i (counting from the bottom of stack the) to references 126 slot virtuals for register that are >= i, but not those that are deeper. 127 This convention makes most operations easier. For example iadd works 128 even when the stack contains (reg[0], reg[1]): It results in the 129 stack containing (reg[0]+reg[1]), which is OK. However, some stack 130 operations are more complicated. 
For example dup given a stack 131 containing (reg[0]) would yield (reg[0], reg[0]), which would violate 132 the convention, since stack value 1 would refer to a register with 133 lower index (reg[0]), which flush_quick_stack does not safely handle. 134 So dup cannot just add an extra element to the quick_stack, but iadd can. 135*/ 136 137static GTY(()) vec<tree, va_gc> *quick_stack; 138 139/* The physical memory page size used in this computer. See 140 build_field_ref(). */ 141static GTY(()) tree page_size; 142 143/* The stack pointer of the Java virtual machine. 144 This does include the size of the quick_stack. */ 145 146int stack_pointer; 147 148const unsigned char *linenumber_table; 149int linenumber_count; 150 151/* Largest pc so far in this method that has been passed to lookup_label. */ 152int highest_label_pc_this_method = -1; 153 154/* Base value for this method to add to pc to get generated label. */ 155int start_label_pc_this_method = 0; 156 157void 158init_expr_processing (void) 159{ 160 operand_type[21] = operand_type[54] = int_type_node; 161 operand_type[22] = operand_type[55] = long_type_node; 162 operand_type[23] = operand_type[56] = float_type_node; 163 operand_type[24] = operand_type[57] = double_type_node; 164 operand_type[25] = operand_type[58] = ptr_type_node; 165} 166 167tree 168java_truthvalue_conversion (tree expr) 169{ 170 /* It is simpler and generates better code to have only TRUTH_*_EXPR 171 or comparison expressions as truth values at this level. 172 173 This function should normally be identity for Java. 
*/ 174 175 switch (TREE_CODE (expr)) 176 { 177 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR: 178 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR: 179 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR: 180 case ORDERED_EXPR: case UNORDERED_EXPR: 181 case TRUTH_ANDIF_EXPR: 182 case TRUTH_ORIF_EXPR: 183 case TRUTH_AND_EXPR: 184 case TRUTH_OR_EXPR: 185 case TRUTH_XOR_EXPR: 186 case TRUTH_NOT_EXPR: 187 case ERROR_MARK: 188 return expr; 189 190 case INTEGER_CST: 191 return integer_zerop (expr) ? boolean_false_node : boolean_true_node; 192 193 case REAL_CST: 194 return real_zerop (expr) ? boolean_false_node : boolean_true_node; 195 196 /* are these legal? XXX JH */ 197 case NEGATE_EXPR: 198 case ABS_EXPR: 199 case FLOAT_EXPR: 200 /* These don't change whether an object is nonzero or zero. */ 201 return java_truthvalue_conversion (TREE_OPERAND (expr, 0)); 202 203 case COND_EXPR: 204 /* Distribute the conversion into the arms of a COND_EXPR. */ 205 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0), 206 java_truthvalue_conversion (TREE_OPERAND (expr, 1)), 207 java_truthvalue_conversion (TREE_OPERAND (expr, 2))); 208 209 case NOP_EXPR: 210 /* If this is widening the argument, we can ignore it. */ 211 if (TYPE_PRECISION (TREE_TYPE (expr)) 212 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0)))) 213 return java_truthvalue_conversion (TREE_OPERAND (expr, 0)); 214 /* fall through to default */ 215 216 default: 217 return fold_build2 (NE_EXPR, boolean_type_node, 218 expr, boolean_false_node); 219 } 220} 221 222/* Save any stack slots that happen to be in the quick_stack into their 223 home virtual register slots. 224 225 The copy order is from low stack index to high, to support the invariant 226 that the expression for a slot may contain decls for stack slots with 227 higher (or the same) index, but not lower. 
*/ 228 229static void 230flush_quick_stack (void) 231{ 232 int stack_index = stack_pointer; 233 unsigned ix; 234 tree t; 235 236 /* Count the number of slots the quick stack is holding. */ 237 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++) 238 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t)); 239 240 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++) 241 { 242 tree decl, type = TREE_TYPE (t); 243 244 decl = find_stack_slot (stack_index, type); 245 if (decl != t) 246 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (t), decl, t)); 247 stack_index += 1 + TYPE_IS_WIDE (type); 248 } 249 250 vec_safe_truncate (quick_stack, 0); 251} 252 253/* Push TYPE on the type stack. 254 Return true on success, 0 on overflow. */ 255 256int 257push_type_0 (tree type) 258{ 259 int n_words; 260 type = promote_type (type); 261 n_words = 1 + TYPE_IS_WIDE (type); 262 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl)) 263 return 0; 264 /* Allocate decl for this variable now, so we get a temporary that 265 survives the whole method. */ 266 find_stack_slot (stack_pointer, type); 267 stack_type_map[stack_pointer++] = type; 268 n_words--; 269 while (--n_words >= 0) 270 stack_type_map[stack_pointer++] = TYPE_SECOND; 271 return 1; 272} 273 274void 275push_type (tree type) 276{ 277 int r = push_type_0 (type); 278 gcc_assert (r); 279} 280 281static void 282push_value (tree value) 283{ 284 tree type = TREE_TYPE (value); 285 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type)) 286 { 287 type = promote_type (type); 288 value = convert (type, value); 289 } 290 push_type (type); 291 vec_safe_push (quick_stack, value); 292 293 /* If the value has a side effect, then we need to evaluate it 294 whether or not the result is used. If the value ends up on the 295 quick stack and is then popped, this won't happen -- so we flush 296 the quick stack. 
It is safest to simply always flush, though, 297 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for 298 the latter we may need to strip conversions. */ 299 flush_quick_stack (); 300} 301 302/* Pop a type from the type stack. 303 TYPE is the expected type. Return the actual type, which must be 304 convertible to TYPE. 305 On an error, *MESSAGEP is set to a freshly malloc'd error message. */ 306 307tree 308pop_type_0 (tree type, char **messagep) 309{ 310 int n_words; 311 tree t; 312 *messagep = NULL; 313 if (TREE_CODE (type) == RECORD_TYPE) 314 type = promote_type (type); 315 n_words = 1 + TYPE_IS_WIDE (type); 316 if (stack_pointer < n_words) 317 { 318 *messagep = xstrdup ("stack underflow"); 319 return type; 320 } 321 while (--n_words > 0) 322 { 323 if (stack_type_map[--stack_pointer] != void_type_node) 324 { 325 *messagep = xstrdup ("Invalid multi-word value on type stack"); 326 return type; 327 } 328 } 329 t = stack_type_map[--stack_pointer]; 330 if (type == NULL_TREE || t == type) 331 return t; 332 if (TREE_CODE (t) == TREE_LIST) 333 { 334 do 335 { 336 tree tt = TREE_PURPOSE (t); 337 if (! can_widen_reference_to (tt, type)) 338 { 339 t = tt; 340 goto fail; 341 } 342 t = TREE_CHAIN (t); 343 } 344 while (t); 345 return t; 346 } 347 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t) 348 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32) 349 return t; 350 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE) 351 { 352 /* If the expected type we've been passed is object or ptr 353 (i.e. void*), the caller needs to know the real type. */ 354 if (type == ptr_type_node || type == object_ptr_type_node) 355 return t; 356 357 /* Since the verifier has already run, we know that any 358 types we see will be compatible. In BC mode, this fact 359 may be checked at runtime, but if that is so then we can 360 assume its truth here as well. So, we always succeed 361 here, with the expected type. */ 362 return type; 363 } 364 365 if (! 
flag_verify_invocations && flag_indirect_dispatch 366 && t == object_ptr_type_node) 367 { 368 if (type != ptr_type_node) 369 warning (0, "need to insert runtime check for %s", 370 xstrdup (lang_printable_name (type, 0))); 371 return type; 372 } 373 374 /* lang_printable_name uses a static buffer, so we must save the result 375 from calling it the first time. */ 376 fail: 377 { 378 char *temp = xstrdup (lang_printable_name (type, 0)); 379 /* If the stack contains a multi-word type, keep popping the stack until 380 the real type is found. */ 381 while (t == void_type_node) 382 t = stack_type_map[--stack_pointer]; 383 *messagep = concat ("expected type '", temp, 384 "' but stack contains '", lang_printable_name (t, 0), 385 "'", NULL); 386 free (temp); 387 } 388 return type; 389} 390 391/* Pop a type from the type stack. 392 TYPE is the expected type. Return the actual type, which must be 393 convertible to TYPE, otherwise call error. */ 394 395tree 396pop_type (tree type) 397{ 398 char *message = NULL; 399 type = pop_type_0 (type, &message); 400 if (message != NULL) 401 { 402 error ("%s", message); 403 free (message); 404 } 405 return type; 406} 407 408 409/* Return true if two type assertions are equal. */ 410 411bool 412type_assertion_hasher::equal (type_assertion *k1, type_assertion *k2) 413{ 414 return (k1->assertion_code == k2->assertion_code 415 && k1->op1 == k2->op1 416 && k1->op2 == k2->op2); 417} 418 419/* Hash a type assertion. */ 420 421hashval_t 422type_assertion_hasher::hash (type_assertion *k_p) 423{ 424 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof 425 k_p->assertion_code, 0); 426 427 switch (k_p->assertion_code) 428 { 429 case JV_ASSERT_TYPES_COMPATIBLE: 430 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2), 431 hash); 432 /* Fall through. */ 433 434 case JV_ASSERT_IS_INSTANTIABLE: 435 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1), 436 hash); 437 /* Fall through. 
*/ 438 439 case JV_ASSERT_END_OF_TABLE: 440 break; 441 442 default: 443 gcc_unreachable (); 444 } 445 446 return hash; 447} 448 449/* Add an entry to the type assertion table for the given class. 450 KLASS is the class for which this assertion will be evaluated by the 451 runtime during loading/initialization. 452 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h. 453 OP1 and OP2 are the operands. The tree type of these arguments may be 454 specific to each assertion_code. */ 455 456void 457add_type_assertion (tree klass, int assertion_code, tree op1, tree op2) 458{ 459 hash_table<type_assertion_hasher> *assertions_htab; 460 type_assertion as; 461 type_assertion **as_pp; 462 463 assertions_htab = TYPE_ASSERTIONS (klass); 464 if (assertions_htab == NULL) 465 { 466 assertions_htab = hash_table<type_assertion_hasher>::create_ggc (7); 467 TYPE_ASSERTIONS (current_class) = assertions_htab; 468 } 469 470 as.assertion_code = assertion_code; 471 as.op1 = op1; 472 as.op2 = op2; 473 474 as_pp = assertions_htab->find_slot (&as, INSERT); 475 476 /* Don't add the same assertion twice. */ 477 if (*as_pp) 478 return; 479 480 *as_pp = ggc_alloc<type_assertion> (); 481 **as_pp = as; 482} 483 484 485/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE. 486 Handles array types and interfaces. */ 487 488int 489can_widen_reference_to (tree source_type, tree target_type) 490{ 491 if (source_type == ptr_type_node || target_type == object_ptr_type_node) 492 return 1; 493 494 /* Get rid of pointers */ 495 if (TREE_CODE (source_type) == POINTER_TYPE) 496 source_type = TREE_TYPE (source_type); 497 if (TREE_CODE (target_type) == POINTER_TYPE) 498 target_type = TREE_TYPE (target_type); 499 500 if (source_type == target_type) 501 return 1; 502 503 /* FIXME: This is very pessimistic, in that it checks everything, 504 even if we already know that the types are compatible. If we're 505 to support full Java class loader semantics, we need this. 
506 However, we could do something more optimal. */ 507 if (! flag_verify_invocations) 508 { 509 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE, 510 source_type, target_type); 511 512 if (!quiet_flag) 513 warning (0, "assert: %s is assign compatible with %s", 514 xstrdup (lang_printable_name (target_type, 0)), 515 xstrdup (lang_printable_name (source_type, 0))); 516 /* Punt everything to runtime. */ 517 return 1; 518 } 519 520 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type)) 521 { 522 return 1; 523 } 524 else 525 { 526 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type)) 527 { 528 HOST_WIDE_INT source_length, target_length; 529 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type)) 530 { 531 /* An array implements Cloneable and Serializable. */ 532 tree name = DECL_NAME (TYPE_NAME (target_type)); 533 return (name == java_lang_cloneable_identifier_node 534 || name == java_io_serializable_identifier_node); 535 } 536 target_length = java_array_type_length (target_type); 537 if (target_length >= 0) 538 { 539 source_length = java_array_type_length (source_type); 540 if (source_length != target_length) 541 return 0; 542 } 543 source_type = TYPE_ARRAY_ELEMENT (source_type); 544 target_type = TYPE_ARRAY_ELEMENT (target_type); 545 if (source_type == target_type) 546 return 1; 547 if (TREE_CODE (source_type) != POINTER_TYPE 548 || TREE_CODE (target_type) != POINTER_TYPE) 549 return 0; 550 return can_widen_reference_to (source_type, target_type); 551 } 552 else 553 { 554 int source_depth = class_depth (source_type); 555 int target_depth = class_depth (target_type); 556 557 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type)) 558 { 559 if (! 
quiet_flag) 560 warning (0, "assert: %s is assign compatible with %s", 561 xstrdup (lang_printable_name (target_type, 0)), 562 xstrdup (lang_printable_name (source_type, 0))); 563 return 1; 564 } 565 566 /* class_depth can return a negative depth if an error occurred */ 567 if (source_depth < 0 || target_depth < 0) 568 return 0; 569 570 if (CLASS_INTERFACE (TYPE_NAME (target_type))) 571 { 572 /* target_type is OK if source_type or source_type ancestors 573 implement target_type. We handle multiple sub-interfaces */ 574 tree binfo, base_binfo; 575 int i; 576 577 for (binfo = TYPE_BINFO (source_type), i = 0; 578 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) 579 if (can_widen_reference_to 580 (BINFO_TYPE (base_binfo), target_type)) 581 return 1; 582 583 if (!i) 584 return 0; 585 } 586 587 for ( ; source_depth > target_depth; source_depth--) 588 { 589 source_type 590 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0)); 591 } 592 return source_type == target_type; 593 } 594 } 595} 596 597static tree 598pop_value (tree type) 599{ 600 type = pop_type (type); 601 if (vec_safe_length (quick_stack) != 0) 602 return quick_stack->pop (); 603 else 604 return find_stack_slot (stack_pointer, promote_type (type)); 605} 606 607 608/* Pop and discard the top COUNT stack slots. */ 609 610static void 611java_stack_pop (int count) 612{ 613 while (count > 0) 614 { 615 tree type; 616 617 gcc_assert (stack_pointer != 0); 618 619 type = stack_type_map[stack_pointer - 1]; 620 if (type == TYPE_SECOND) 621 { 622 count--; 623 gcc_assert (stack_pointer != 1 && count > 0); 624 625 type = stack_type_map[stack_pointer - 2]; 626 } 627 pop_value (type); 628 count--; 629 } 630} 631 632/* Implement the 'swap' operator (to swap two top stack slots). 
*/ 633 634static void 635java_stack_swap (void) 636{ 637 tree type1, type2; 638 tree temp; 639 tree decl1, decl2; 640 641 if (stack_pointer < 2 642 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND 643 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND 644 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2)) 645 /* Bad stack swap. */ 646 abort (); 647 /* Bad stack swap. */ 648 649 flush_quick_stack (); 650 decl1 = find_stack_slot (stack_pointer - 1, type1); 651 decl2 = find_stack_slot (stack_pointer - 2, type2); 652 temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1); 653 java_add_local_var (temp); 654 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1)); 655 java_add_stmt (build2 (MODIFY_EXPR, type2, 656 find_stack_slot (stack_pointer - 1, type2), 657 decl2)); 658 java_add_stmt (build2 (MODIFY_EXPR, type1, 659 find_stack_slot (stack_pointer - 2, type1), 660 temp)); 661 stack_type_map[stack_pointer - 1] = type2; 662 stack_type_map[stack_pointer - 2] = type1; 663} 664 665static void 666java_stack_dup (int size, int offset) 667{ 668 int low_index = stack_pointer - size - offset; 669 int dst_index; 670 if (low_index < 0) 671 error ("stack underflow - dup* operation"); 672 673 flush_quick_stack (); 674 675 stack_pointer += size; 676 dst_index = stack_pointer; 677 678 for (dst_index = stack_pointer; --dst_index >= low_index; ) 679 { 680 tree type; 681 int src_index = dst_index - size; 682 if (src_index < low_index) 683 src_index = dst_index + size + offset; 684 type = stack_type_map [src_index]; 685 if (type == TYPE_SECOND) 686 { 687 /* Dup operation splits 64-bit number. */ 688 gcc_assert (src_index > low_index); 689 690 stack_type_map[dst_index] = type; 691 src_index--; dst_index--; 692 type = stack_type_map[src_index]; 693 gcc_assert (TYPE_IS_WIDE (type)); 694 } 695 else 696 gcc_assert (! 
TYPE_IS_WIDE (type)); 697 698 if (src_index != dst_index) 699 { 700 tree src_decl = find_stack_slot (src_index, type); 701 tree dst_decl = find_stack_slot (dst_index, type); 702 703 java_add_stmt 704 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl)); 705 stack_type_map[dst_index] = type; 706 } 707 } 708} 709 710/* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the 711 value stack. */ 712 713static void 714build_java_athrow (tree node) 715{ 716 tree call; 717 718 call = build_call_nary (void_type_node, 719 build_address_of (throw_node), 720 1, node); 721 TREE_SIDE_EFFECTS (call) = 1; 722 java_add_stmt (call); 723 java_stack_pop (stack_pointer); 724} 725 726/* Implementation for jsr/ret */ 727 728static void 729build_java_jsr (int target_pc, int return_pc) 730{ 731 tree where = lookup_label (target_pc); 732 tree ret = lookup_label (return_pc); 733 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret); 734 push_value (ret_label); 735 flush_quick_stack (); 736 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where)); 737 738 /* Do not need to emit the label here. We noted the existence of the 739 label as a jump target in note_instructions; we'll emit the label 740 for real at the beginning of the expand_byte_code loop. */ 741} 742 743static void 744build_java_ret (tree location) 745{ 746 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location)); 747} 748 749/* Implementation of operations on array: new, load, store, length */ 750 751tree 752decode_newarray_type (int atype) 753{ 754 switch (atype) 755 { 756 case 4: return boolean_type_node; 757 case 5: return char_type_node; 758 case 6: return float_type_node; 759 case 7: return double_type_node; 760 case 8: return byte_type_node; 761 case 9: return short_type_node; 762 case 10: return int_type_node; 763 case 11: return long_type_node; 764 default: return NULL_TREE; 765 } 766} 767 768/* Map primitive type to the code used by OPCODE_newarray. 
*/ 769 770int 771encode_newarray_type (tree type) 772{ 773 if (type == boolean_type_node) 774 return 4; 775 else if (type == char_type_node) 776 return 5; 777 else if (type == float_type_node) 778 return 6; 779 else if (type == double_type_node) 780 return 7; 781 else if (type == byte_type_node) 782 return 8; 783 else if (type == short_type_node) 784 return 9; 785 else if (type == int_type_node) 786 return 10; 787 else if (type == long_type_node) 788 return 11; 789 else 790 gcc_unreachable (); 791} 792 793/* Build a call to _Jv_ThrowBadArrayIndex(), the 794 ArrayIndexOfBoundsException exception handler. */ 795 796static tree 797build_java_throw_out_of_bounds_exception (tree index) 798{ 799 tree node; 800 801 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex() 802 has void return type. We cannot just set the type of the CALL_EXPR below 803 to int_type_node because we would lose it during gimplification. */ 804 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node)))); 805 node = build_call_nary (void_type_node, 806 build_address_of (soft_badarrayindex_node), 807 1, index); 808 TREE_SIDE_EFFECTS (node) = 1; 809 810 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node); 811 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */ 812 813 return (node); 814} 815 816/* Return the length of an array. Doesn't perform any checking on the nature 817 or value of the array NODE. May be used to implement some bytecodes. */ 818 819tree 820build_java_array_length_access (tree node) 821{ 822 tree type = TREE_TYPE (node); 823 tree array_type = TREE_TYPE (type); 824 HOST_WIDE_INT length; 825 826 if (!is_array_type_p (type)) 827 { 828 /* With the new verifier, we will see an ordinary pointer type 829 here. In this case, we just use an arbitrary array type. 
*/ 830 array_type = build_java_array_type (object_ptr_type_node, -1); 831 type = promote_type (array_type); 832 } 833 834 length = java_array_type_length (type); 835 if (length >= 0) 836 return build_int_cst (NULL_TREE, length); 837 838 node = build3 (COMPONENT_REF, int_type_node, 839 build_java_indirect_ref (array_type, node, 840 flag_check_references), 841 lookup_field (&array_type, get_identifier ("length")), 842 NULL_TREE); 843 IS_ARRAY_LENGTH_ACCESS (node) = 1; 844 return node; 845} 846 847/* Optionally checks a reference against the NULL pointer. ARG1: the 848 expr, ARG2: we should check the reference. Don't generate extra 849 checks if we're not generating code. */ 850 851tree 852java_check_reference (tree expr, int check) 853{ 854 if (!flag_syntax_only && check) 855 { 856 expr = save_expr (expr); 857 expr = build3 (COND_EXPR, TREE_TYPE (expr), 858 build2 (EQ_EXPR, boolean_type_node, 859 expr, null_pointer_node), 860 build_call_nary (void_type_node, 861 build_address_of (soft_nullpointer_node), 862 0), 863 expr); 864 } 865 866 return expr; 867} 868 869/* Reference an object: just like an INDIRECT_REF, but with checking. */ 870 871tree 872build_java_indirect_ref (tree type, tree expr, int check) 873{ 874 tree t; 875 t = java_check_reference (expr, check); 876 t = convert (build_pointer_type (type), t); 877 return build1 (INDIRECT_REF, type, t); 878} 879 880/* Implement array indexing (either as l-value or r-value). 881 Returns a tree for ARRAY[INDEX], assume TYPE is the element type. 882 Optionally performs bounds checking and/or test to NULL. 883 At this point, ARRAY should have been verified as an array. 
*/ 884 885tree 886build_java_arrayaccess (tree array, tree type, tree index) 887{ 888 tree node, throw_expr = NULL_TREE; 889 tree data_field; 890 tree ref; 891 tree array_type = TREE_TYPE (TREE_TYPE (array)); 892 tree size_exp = fold_convert (sizetype, size_in_bytes (type)); 893 894 if (!is_array_type_p (TREE_TYPE (array))) 895 { 896 /* With the new verifier, we will see an ordinary pointer type 897 here. In this case, we just use the correct array type. */ 898 array_type = build_java_array_type (type, -1); 899 } 900 901 if (flag_bounds_check) 902 { 903 /* Generate: 904 * (unsigned jint) INDEX >= (unsigned jint) LEN 905 * && throw ArrayIndexOutOfBoundsException. 906 * Note this is equivalent to and more efficient than: 907 * INDEX < 0 || INDEX >= LEN && throw ... */ 908 tree test; 909 tree len = convert (unsigned_int_type_node, 910 build_java_array_length_access (array)); 911 test = fold_build2 (GE_EXPR, boolean_type_node, 912 convert (unsigned_int_type_node, index), 913 len); 914 if (! integer_zerop (test)) 915 { 916 throw_expr 917 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test, 918 build_java_throw_out_of_bounds_exception (index)); 919 /* allows expansion within COMPOUND */ 920 TREE_SIDE_EFFECTS( throw_expr ) = 1; 921 } 922 } 923 924 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order 925 to have the bounds check evaluated first. */ 926 if (throw_expr != NULL_TREE) 927 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index); 928 929 data_field = lookup_field (&array_type, get_identifier ("data")); 930 931 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field), 932 build_java_indirect_ref (array_type, array, 933 flag_check_references), 934 data_field, NULL_TREE); 935 936 /* Take the address of the data field and convert it to a pointer to 937 the element type. */ 938 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref)); 939 940 /* Multiply the index by the size of an element to obtain a byte 941 offset. 
Convert the result to a pointer to the element type. */ 942 index = build2 (MULT_EXPR, sizetype, 943 fold_convert (sizetype, index), 944 size_exp); 945 946 /* Sum the byte offset and the address of the data field. */ 947 node = fold_build_pointer_plus (node, index); 948 949 /* Finally, return 950 951 *((&array->data) + index*size_exp) 952 953 */ 954 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node); 955} 956 957/* Generate code to throw an ArrayStoreException if OBJECT is not assignable 958 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can 959 determine that no check is required. */ 960 961tree 962build_java_arraystore_check (tree array, tree object) 963{ 964 tree check, element_type, source; 965 tree array_type_p = TREE_TYPE (array); 966 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object))); 967 968 if (! flag_verify_invocations) 969 { 970 /* With the new verifier, we don't track precise types. FIXME: 971 performance regression here. */ 972 element_type = TYPE_NAME (object_type_node); 973 } 974 else 975 { 976 gcc_assert (is_array_type_p (array_type_p)); 977 978 /* Get the TYPE_DECL for ARRAY's element type. */ 979 element_type 980 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p)))); 981 } 982 983 gcc_assert (TREE_CODE (element_type) == TYPE_DECL 984 && TREE_CODE (object_type) == TYPE_DECL); 985 986 if (!flag_store_check) 987 return build1 (NOP_EXPR, array_type_p, array); 988 989 /* No check is needed if the element type is final. Also check that 990 element_type matches object_type, since in the bytecode 991 compilation case element_type may be the actual element type of 992 the array rather than its declared type. However, if we're doing 993 indirect dispatch, we can't do the `final' optimization. */ 994 if (element_type == object_type 995 && ! flag_indirect_dispatch 996 && CLASS_FINAL (element_type)) 997 return build1 (NOP_EXPR, array_type_p, array); 998 999 /* OBJECT might be wrapped by a SAVE_EXPR. 
*/ 1000 if (TREE_CODE (object) == SAVE_EXPR) 1001 source = TREE_OPERAND (object, 0); 1002 else 1003 source = object; 1004 1005 /* Avoid the check if OBJECT was just loaded from the same array. */ 1006 if (TREE_CODE (source) == ARRAY_REF) 1007 { 1008 tree target; 1009 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */ 1010 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */ 1011 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */ 1012 if (TREE_CODE (source) == SAVE_EXPR) 1013 source = TREE_OPERAND (source, 0); 1014 1015 target = array; 1016 if (TREE_CODE (target) == SAVE_EXPR) 1017 target = TREE_OPERAND (target, 0); 1018 1019 if (source == target) 1020 return build1 (NOP_EXPR, array_type_p, array); 1021 } 1022 1023 /* Build an invocation of _Jv_CheckArrayStore */ 1024 check = build_call_nary (void_type_node, 1025 build_address_of (soft_checkarraystore_node), 1026 2, array, object); 1027 TREE_SIDE_EFFECTS (check) = 1; 1028 1029 return check; 1030} 1031 1032/* Makes sure that INDEXED_TYPE is appropriate. If not, make it from 1033 ARRAY_NODE. This function is used to retrieve something less vague than 1034 a pointer type when indexing the first dimension of something like [[<t>. 1035 May return a corrected type, if necessary, otherwise INDEXED_TYPE is 1036 return unchanged. */ 1037 1038static tree 1039build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED, 1040 tree indexed_type) 1041{ 1042 /* We used to check to see if ARRAY_NODE really had array type. 1043 However, with the new verifier, this is not necessary, as we know 1044 that the object will be an array of the appropriate type. */ 1045 1046 return indexed_type; 1047} 1048 1049/* newarray triggers a call to _Jv_NewPrimArray. This function should be 1050 called with an integer code (the type of array to create), and the length 1051 of the array to create. 
*/ 1052 1053tree 1054build_newarray (int atype_value, tree length) 1055{ 1056 tree type_arg; 1057 1058 tree prim_type = decode_newarray_type (atype_value); 1059 tree type 1060 = build_java_array_type (prim_type, 1061 tree_fits_shwi_p (length) 1062 ? tree_to_shwi (length) : -1); 1063 1064 /* Pass a reference to the primitive type class and save the runtime 1065 some work. */ 1066 type_arg = build_class_ref (prim_type); 1067 1068 return build_call_nary (promote_type (type), 1069 build_address_of (soft_newarray_node), 1070 2, type_arg, length); 1071} 1072 1073/* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size 1074 of the dimension. */ 1075 1076tree 1077build_anewarray (tree class_type, tree length) 1078{ 1079 tree type 1080 = build_java_array_type (class_type, 1081 tree_fits_shwi_p (length) 1082 ? tree_to_shwi (length) : -1); 1083 1084 return build_call_nary (promote_type (type), 1085 build_address_of (soft_anewarray_node), 1086 3, 1087 length, 1088 build_class_ref (class_type), 1089 null_pointer_node); 1090} 1091 1092/* Return a node the evaluates 'new TYPE[LENGTH]'. */ 1093 1094tree 1095build_new_array (tree type, tree length) 1096{ 1097 if (JPRIMITIVE_TYPE_P (type)) 1098 return build_newarray (encode_newarray_type (type), length); 1099 else 1100 return build_anewarray (TREE_TYPE (type), length); 1101} 1102 1103/* Generates a call to _Jv_NewMultiArray. multianewarray expects a 1104 class pointer, a number of dimensions and the matching number of 1105 dimensions. The argument list is NULL terminated. 
 */

static void
expand_java_multianewarray (tree class_type, int ndim)
{
  int i;
  vec<tree, va_gc> *args = NULL;

  /* Room for the class ref, the dimension count, NDIM sizes and the
     NULL terminator.  */
  vec_safe_grow (args, 3 + ndim);

  (*args)[0] = build_class_ref (class_type);
  (*args)[1] = build_int_cst (NULL_TREE, ndim);

  /* The sizes are popped innermost dimension first, so fill the slots
     backwards to restore source order.  */
  for (i = ndim - 1; i >= 0; i--)
    (*args)[(unsigned) (2 + i)] = pop_value (int_type_node);

  (*args)[2 + ndim] = null_pointer_node;

  push_value (build_call_vec (promote_type (class_type),
			      build_address_of (soft_multianewarray_node),
			      args));
}

/* ARRAY[INDEX] <- RHS.  build_java_check_indexed_type makes sure that
   ARRAY is an array type.  May expand some bound checking and NULL
   pointer checking.  RHS_TYPE_NODE is the type of the value we are
   going to store.  In the case of CHAR/BYTE/BOOLEAN/SHORT, the type
   popped off the stack is an INT.  In those cases, we make the
   conversion.

   If ARRAY is a reference type, the assignment is checked at run-time
   to make sure that the RHS can be assigned to the array element
   type.  It is not necessary to generate this code if ARRAY is final.  */

static void
expand_java_arraystore (tree rhs_type_node)
{
  /* Sub-int integral values live on the JVM stack as int.  */
  tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
			      && TYPE_PRECISION (rhs_type_node) <= 32) ?
			     int_type_node : rhs_type_node);
  tree index = pop_value (int_type_node);
  tree array_type, array, temp, access;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      rhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (rhs_type_node, -1);

  array = pop_value (array_type);
  array = build1 (NOP_EXPR, promote_type (array_type), array);

  rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);

  flush_quick_stack ();

  index = save_expr (index);
  array = save_expr (array);

  /* We want to perform the bounds check (done by
     build_java_arrayaccess) before the type check (done by
     build_java_arraystore_check).  So, we call build_java_arrayaccess
     -- which returns an ARRAY_REF lvalue -- and we then generate code
     to stash the address of that lvalue in a temp.  Then we call
     build_java_arraystore_check, and finally we generate a
     MODIFY_EXPR to set the array element.  */

  access = build_java_arrayaccess (array, rhs_type_node, index);
  temp = build_decl (input_location, VAR_DECL, NULL_TREE,
		     build_pointer_type (TREE_TYPE (access)));
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
			 temp,
			 build_fold_addr_expr (access)));

  /* Only reference-typed stores need the runtime assignability
     check.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      tree check = build_java_arraystore_check (array, rhs_node);
      java_add_stmt (check);
    }

  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
			 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
			 rhs_node));
}

/* Expand the evaluation of ARRAY[INDEX].  build_java_check_indexed_type makes
   sure that LHS is an array type.  May expand some bound checking and NULL
   pointer checking.
   LHS_TYPE_NODE is the type of ARRAY[INDEX].  But in the case of CHAR/BYTE/
   BOOLEAN/SHORT, we push a promoted type back to the stack.
1199*/ 1200 1201static void 1202expand_java_arrayload (tree lhs_type_node) 1203{ 1204 tree load_node; 1205 tree index_node = pop_value (int_type_node); 1206 tree array_type; 1207 tree array_node; 1208 1209 /* If we're processing an `aaload' we might as well just pick 1210 `Object'. */ 1211 if (TREE_CODE (lhs_type_node) == POINTER_TYPE) 1212 { 1213 array_type = build_java_array_type (object_ptr_type_node, -1); 1214 lhs_type_node = object_ptr_type_node; 1215 } 1216 else 1217 array_type = build_java_array_type (lhs_type_node, -1); 1218 array_node = pop_value (array_type); 1219 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node); 1220 1221 index_node = save_expr (index_node); 1222 array_node = save_expr (array_node); 1223 1224 lhs_type_node = build_java_check_indexed_type (array_node, 1225 lhs_type_node); 1226 load_node = build_java_arrayaccess (array_node, 1227 lhs_type_node, 1228 index_node); 1229 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32) 1230 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node); 1231 push_value (load_node); 1232} 1233 1234/* Expands .length. Makes sure that we deal with and array and may expand 1235 a NULL check on the array object. */ 1236 1237static void 1238expand_java_array_length (void) 1239{ 1240 tree array = pop_value (ptr_type_node); 1241 tree length = build_java_array_length_access (array); 1242 1243 push_value (length); 1244} 1245 1246/* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be 1247 either soft_monitorenter_node or soft_monitorexit_node. */ 1248 1249static tree 1250build_java_monitor (tree call, tree object) 1251{ 1252 return build_call_nary (void_type_node, 1253 build_address_of (call), 1254 1, object); 1255} 1256 1257/* Emit code for one of the PUSHC instructions. 
 */

static void
expand_java_pushc (int ival, tree type)
{
  tree value;
  /* aconst_null: push the null reference.  */
  if (type == ptr_type_node && ival == 0)
    value = null_pointer_node;
  else if (type == int_type_node || type == long_type_node)
    value = build_int_cst (type, ival);
  else if (type == float_type_node || type == double_type_node)
    {
      REAL_VALUE_TYPE x;
      real_from_integer (&x, TYPE_MODE (type), ival, SIGNED);
      value = build_real (type, x);
    }
  else
    gcc_unreachable ();

  push_value (value);
}

/* Emit a return statement for the current method.  TYPE is the
   method's declared return type; void_type_node means a bare
   `return'.  For non-void returns the value is popped off the quick
   stack and assigned to DECL_RESULT.  */

static void
expand_java_return (tree type)
{
  if (type == void_type_node)
    java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
  else
    {
      tree retval = pop_value (type);
      tree res = DECL_RESULT (current_function_decl);
      retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);

      /* Handle the situation where the native integer type is smaller
	 than the JVM integer.  It can happen for many cross compilers.
	 The whole if expression just goes away if INT_TYPE_SIZE < 32
	 is false.  */
      if (INT_TYPE_SIZE < 32
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
	      < GET_MODE_SIZE (TYPE_MODE (type))))
	retval = build1 (NOP_EXPR, TREE_TYPE (res), retval);

      TREE_SIDE_EFFECTS (retval) = 1;
      java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
    }
}

/* Push local variable slot INDEX (with type TYPE) at bytecode
   position PC onto the quick stack, via a fresh temporary.  */

static void
expand_load_internal (int index, tree type, int pc)
{
  tree copy;
  tree var = find_local_variable (index, type, pc);

  /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
     on the stack.  If there is an assignment to this VAR_DECL between
     the stack push and the use, then the wrong code could be
     generated.  To avoid this we create a new local and copy our
     value into it.  Then we push this new local on the stack.
     Hopefully this all gets optimized out.  */
  copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
  if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
      && TREE_TYPE (copy) != TREE_TYPE (var))
    var = convert (type, var);
  java_add_local_var (copy);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));

  push_value (copy);
}

/* Return an ADDR_EXPR taking the address of VALUE, typed as a pointer
   to VALUE's type.  */

tree
build_address_of (tree value)
{
  return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
}

/* Return true if TYPE, or any of its superclasses, declares a
   finalizer.  */

bool
class_has_finalize_method (tree type)
{
  tree super = CLASSTYPE_SUPER (type);

  if (super == NULL_TREE)
    return false;	/* Every class with a real finalizer inherits */
			/* from java.lang.Object.  */
  else
    return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
}

/* Build a call to the runtime allocator for an instance of TYPE,
   choosing the finalizer-aware allocator only when needed.  */

tree
java_create_object (tree type)
{
  tree alloc_node = (class_has_finalize_method (type)
		     ? alloc_object_node
		     : alloc_no_finalizer_node);

  return build_call_nary (promote_type (type),
			  build_address_of (alloc_node),
			  1, build_class_ref (type));
}

/* Expand the `new' bytecode: make sure TYPE is loaded and laid out,
   then push a call to the appropriate allocator.  */

static void
expand_java_NEW (tree type)
{
  tree alloc_node;

  alloc_node = (class_has_finalize_method (type) ? alloc_object_node
	        : alloc_no_finalizer_node);
  if (! CLASS_LOADED_P (type))
    load_class (type, 1);
  safe_layout_class (type);
  push_value (build_call_nary (promote_type (type),
			       build_address_of (alloc_node),
			       1, build_class_ref (type)));
}

/* This returns an expression which will extract the class of an
   object.
 */

tree
build_get_class (tree value)
{
  /* Follow VALUE->vtable->class, with an optional null check on
     VALUE.  */
  tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
  tree vtable_field = lookup_field (&object_type_node,
				    get_identifier ("vtable"));
  tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
		     build_java_indirect_ref (object_type_node, value,
					      flag_check_references),
		     vtable_field, NULL_TREE);
  return build3 (COMPONENT_REF, class_ptr_type,
		 build1 (INDIRECT_REF, dtable_type, tmp),
		 class_field, NULL_TREE);
}

/* This builds the tree representation of the `instanceof' operator.
   It tries various tricks to optimize this in cases where types are
   known.  */

tree
build_instanceof (tree value, tree type)
{
  tree expr;
  tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
  tree valtype = TREE_TYPE (TREE_TYPE (value));
  tree valclass = TYPE_NAME (valtype);
  tree klass;

  /* When compiling from bytecode, we need to ensure that TYPE has
     been loaded.  */
  if (CLASS_P (type) && ! CLASS_LOADED_P (type))
    {
      load_class (type, 1);
      safe_layout_class (type);
      if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
	return error_mark_node;
    }
  klass = TYPE_NAME (type);

  if (type == object_type_node || inherits_from_p (valtype, type))
    {
      /* Anything except `null' is an instance of Object.  Likewise,
	 if the object is known to be an instance of the class, then
	 we only need to check for `null'.  */
      expr = build2 (NE_EXPR, itype, value, null_pointer_node);
    }
  else if (flag_verify_invocations
	   && ! TYPE_ARRAY_P (type)
	   && ! TYPE_ARRAY_P (valtype)
	   && DECL_P (klass) && DECL_P (valclass)
	   && ! CLASS_INTERFACE (valclass)
	   && ! CLASS_INTERFACE (klass)
	   && ! inherits_from_p (type, valtype)
	   && (CLASS_FINAL (klass)
	       || ! inherits_from_p (valtype, type)))
    {
      /* The classes are from different branches of the derivation
	 tree, so we immediately know the answer.  */
      expr = boolean_false_node;
    }
  else if (DECL_P (klass) && CLASS_FINAL (klass))
    {
      /* TYPE is final: a direct vtable-class comparison suffices,
	 guarded by a null check.  */
      tree save = save_expr (value);
      expr = build3 (COND_EXPR, itype,
		     build2 (NE_EXPR, boolean_type_node,
			     save, null_pointer_node),
		     build2 (EQ_EXPR, itype,
			     build_get_class (save),
			     build_class_ref (type)),
		     boolean_false_node);
    }
  else
    {
      /* General case: defer to the _Jv_IsInstanceOf runtime helper.  */
      expr = build_call_nary (itype,
			      build_address_of (soft_instanceof_node),
			      2, value, build_class_ref (type));
    }
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
  return expr;
}

/* Expand the `instanceof' bytecode against class TYPE.  */

static void
expand_java_INSTANCEOF (tree type)
{
  tree value = pop_value (object_ptr_type_node);
  value = build_instanceof (value, type);
  push_value (value);
}

/* Expand the `checkcast' bytecode: pop a reference, push a call to
   the runtime cast-checking helper.  */

static void
expand_java_CHECKCAST (tree type)
{
  tree value = pop_value (ptr_type_node);
  value = build_call_nary (promote_type (type),
			   build_address_of (soft_checkcast_node),
			   2, build_class_ref (type), value);
  push_value (value);
}

/* Expand the `iinc' bytecode: add IVAL to the int local in slot
   LOCAL_VAR_INDEX at bytecode position PC.  */

static void
expand_iinc (unsigned int local_var_index, int ival, int pc)
{
  tree local_var, res;
  tree constant_value;

  flush_quick_stack ();
  local_var = find_local_variable (local_var_index, int_type_node, pc);
  constant_value = build_int_cst (NULL_TREE, ival);
  res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
}


/* Build a call to the runtime division/remainder helper for OP
   (TRUNC_DIV_EXPR or TRUNC_MOD_EXPR) on int or long operands OP1 and
   OP2; used when -fuse-divide-subroutine is in effect.  */

tree
build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
{
  tree call = NULL;
  tree arg1 = convert (type, op1);
  tree arg2 = convert (type, op2);

  if (type == int_type_node)
    {
      switch (op)
	{
	case TRUNC_DIV_EXPR:
	  call = soft_idiv_node;
	  break;
	case TRUNC_MOD_EXPR:
	  call = soft_irem_node;
	  break;
	default:
	  break;
	}
    }
  else if (type == long_type_node)
    {
      switch (op)
	{
	case TRUNC_DIV_EXPR:
	  call = soft_ldiv_node;
	  break;
	case TRUNC_MOD_EXPR:
	  call = soft_lrem_node;
	  break;
	default:
	  break;
	}
    }

  gcc_assert (call);
  call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
  return call;
}

/* Build an expression for the Java binary operator OP applied to ARG1
   and ARG2, both of type TYPE.  Handles the Java-specific operators
   (unsigned shift, lcmp/fcmpl/fcmpg-style comparisons) and Java
   semantics for shifts, floating remainder and integer division.  */

tree
build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
{
  tree mask;
  switch (op)
    {
    case URSHIFT_EXPR:
      {
	/* Implement >>> as an unsigned >> in a wrapper type.  */
	tree u_type = unsigned_type_for (type);
	arg1 = convert (u_type, arg1);
	arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
	return convert (type, arg1);
      }
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* Java masks the shift count to the operand width.  */
      mask = build_int_cst (int_type_node,
			    TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
      arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
      break;

    case COMPARE_L_EXPR:  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
    case COMPARE_G_EXPR:  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
				   boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_zero_node,
					   op == COMPARE_L_EXPR
					   ? integer_minus_one_node
					   : integer_one_node);
	return fold_build3 (COND_EXPR, int_type_node, ifexp1,
			    op == COMPARE_L_EXPR ? integer_one_node
			    : integer_minus_one_node,
			    second_compare);
      }
    case COMPARE_EXPR:
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_one_node,
					   integer_zero_node);
	return fold_build3 (COND_EXPR, int_type_node,
			    ifexp1, integer_minus_one_node, second_compare);
      }
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      if (TREE_CODE (type) == REAL_TYPE
	  && op == TRUNC_MOD_EXPR)
	{
	  /* Floating remainder goes through fmod, computed in double
	     precision.  */
	  tree call;
	  if (type != double_type_node)
	    {
	      arg1 = convert (double_type_node, arg1);
	      arg2 = convert (double_type_node, arg2);
	    }
	  call = build_call_nary (double_type_node,
				  build_address_of (soft_fmod_node),
				  2, arg1, arg2);
	  if (type != double_type_node)
	    call = convert (type, call);
	  return call;
	}

      if (TREE_CODE (type) == INTEGER_TYPE
	  && flag_use_divide_subroutine
	  && ! flag_syntax_only)
	return build_java_soft_divmod (op, type, arg1, arg2);

      break;
    default:  ;
    }
  return fold_build2 (op, type, arg1, arg2);
}

/* Pop the two operands of binary operator OP off the quick stack and
   push the result.  Shift counts are popped as int regardless of
   TYPE.  */

static void
expand_java_binop (tree type, enum tree_code op)
{
  tree larg, rarg;
  tree ltype = type;
  tree rtype = type;
  switch (op)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case URSHIFT_EXPR:
      rtype = int_type_node;
      rarg = pop_value (rtype);
      break;
    default:
      rarg = pop_value (rtype);
    }
  larg = pop_value (ltype);
  push_value (build_java_binop (op, type, larg, rarg));
}

/* Lookup the field named NAME in *TYPEP or its super classes.
   If not found, return NULL_TREE.
   (If the *TYPEP is not found, or if the field reference is
   ambiguous, return error_mark_node.)
   If found, return the FIELD_DECL, and set *TYPEP to the
   class containing the field.  */

tree
lookup_field (tree *typep, tree name)
{
  /* Make sure the class (and its layout) is available before walking
     its fields.  */
  if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
    {
      load_class (*typep, 1);
      safe_layout_class (*typep);
      if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
	return error_mark_node;
    }
  do
    {
      tree field, binfo, base_binfo;
      tree save_field;
      int i;

      /* A field declared directly in the class wins outright.  */
      for (field = TYPE_FIELDS (*typep); field; field = DECL_CHAIN (field))
	if (DECL_NAME (field) == name)
	  return field;

      /* Process implemented interfaces.  A name inherited from two
	 different interfaces is an ambiguous reference.  */
      save_field = NULL_TREE;
      for (binfo = TYPE_BINFO (*typep), i = 0;
	   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	{
	  tree t = BINFO_TYPE (base_binfo);
	  if ((field = lookup_field (&t, name)))
	    {
	      if (save_field == field)
		continue;
	      if (save_field == NULL_TREE)
		save_field = field;
	      else
		{
		  tree i1 = DECL_CONTEXT (save_field);
		  tree i2 = DECL_CONTEXT (field);
		  error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
			 IDENTIFIER_POINTER (name),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
		  return error_mark_node;
		}
	    }
	}

      if (save_field != NULL_TREE)
	return save_field;

      /* Not here: continue in the superclass.  */
      *typep = CLASSTYPE_SUPER (*typep);
    } while (*typep);
  return NULL_TREE;
}

/* Look up the field named NAME in object SELF_VALUE,
   which has class SELF_CLASS (a non-handle RECORD_TYPE).
   SELF_VALUE is NULL_TREE if looking for a static field.
 */

tree
build_field_ref (tree self_value, tree self_class, tree name)
{
  tree base_class = self_class;
  tree field_decl = lookup_field (&base_class, name);
  if (field_decl == NULL_TREE)
    {
      error ("field %qs not found", IDENTIFIER_POINTER (name));
      return error_mark_node;
    }
  if (self_value == NULL_TREE)
    {
      /* Static field: no instance reference needed.  */
      return build_static_field_ref (field_decl);
    }
  else
    {
      tree base_type = promote_type (base_class);

      /* CHECK is true if self_value is not the this pointer.  */
      int check = (! (DECL_P (self_value)
		      && DECL_NAME (self_value) == this_identifier_node));

      /* Determine whether a field offset from NULL will lie within
	 Page 0: this is necessary on those GNU/Linux/BSD systems that
	 trap SEGV to generate NullPointerExceptions.

	 We assume that Page 0 will be mapped with NOPERM, and that
	 memory may be allocated from any other page, so only field
	 offsets < pagesize are guaranteed to trap.  We also assume
	 the smallest page size we'll encounter is 4k bytes.  */
      if (! flag_syntax_only && check && ! flag_check_references
	  && ! flag_indirect_dispatch)
	{
	  tree field_offset = byte_position (field_decl);
	  if (! page_size)
	    page_size = size_int (4096);
	  check = !tree_int_cst_lt (field_offset, page_size);
	}

      if (base_type != TREE_TYPE (self_value))
	self_value = fold_build1 (NOP_EXPR, base_type, self_value);
      if (! flag_syntax_only && flag_indirect_dispatch)
	{
	  /* Indirect dispatch: fetch the field offset from the otable
	     at run time instead of hard-coding it.  */
	  tree otable_index
	    = build_int_cst (NULL_TREE, get_symbol_table_index
			     (field_decl, NULL_TREE,
			      &TYPE_OTABLE_METHODS (output_class)));
	  tree field_offset
	    = build4 (ARRAY_REF, integer_type_node,
		      TYPE_OTABLE_DECL (output_class), otable_index,
		      NULL_TREE, NULL_TREE);
	  tree address;

	  /* For a field in another class, a zero otable entry means
	     the field vanished: raise NoSuchFieldError at run time.  */
	  if (DECL_CONTEXT (field_decl) != output_class)
	    field_offset
	      = build3 (COND_EXPR, TREE_TYPE (field_offset),
			build2 (EQ_EXPR, boolean_type_node,
				field_offset, integer_zero_node),
			build_call_nary (void_type_node,
					 build_address_of (soft_nosuchfield_node),
					 1, otable_index),
			field_offset);

	  self_value = java_check_reference (self_value, check);
	  address = fold_build_pointer_plus (self_value, field_offset);
	  address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
				  address);
	  return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
	}

      /* Direct dispatch: an ordinary COMPONENT_REF, with an optional
	 null check on the instance.  */
      self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
					    self_value, check);
      return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
			  self_value, field_decl, NULL_TREE);
    }
}

/* Return the LABEL_DECL for bytecode position PC in the current
   method, creating and registering it on first use.  */

tree
lookup_label (int pc)
{
  tree name;
  char buf[32];
  if (pc > highest_label_pc_this_method)
    highest_label_pc_this_method = pc;
  targetm.asm_out.generate_internal_label (buf, "LJpc=",
					   start_label_pc_this_method + pc);
  name = get_identifier (buf);
  if (IDENTIFIER_LOCAL_VALUE (name))
    return IDENTIFIER_LOCAL_VALUE (name);
  else
    {
      /* The type of the address of a label is return_address_type_node.  */
      tree decl = create_label_decl (name);
      return pushdecl (decl);
    }
}

/* Generate a unique name for the purpose of loops and switches
   labels, and try-catch-finally blocks label or temporary variables.
 */

tree
generate_name (void)
{
  /* Monotonic counter makes each generated label identifier unique
     within this translation unit.  */
  static int l_number = 0;
  char buff [32];
  targetm.asm_out.generate_internal_label (buff, "LJv", l_number);
  l_number++;
  return get_identifier (buff);
}

/* Build a LABEL_DECL named NAME in the current function, hidden from
   debug output.  */

tree
create_label_decl (tree name)
{
  tree decl;
  decl = build_decl (input_location, LABEL_DECL, name,
		     TREE_TYPE (return_address_type_node));
  DECL_CONTEXT (decl) = current_function_decl;
  DECL_IGNORED_P (decl) = 1;
  return decl;
}

/* This maps a bytecode offset (PC) to various flags.  */
char *instruction_bits;

/* This is a vector of type states for the current method.  It is
   indexed by PC.  Each element is a tree vector holding the type
   state at that PC.  We only note type states at basic block
   boundaries.  */
vec<tree, va_gc> *type_states;

/* Record that TARGET_PC is a branch target: make sure its label
   exists and flag the instruction as a jump target.  */

static void
note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
{
  lookup_label (target_pc);
  instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
}

/* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
   where CONDITION is one of one the compare operators.  */

static void
expand_compare (enum tree_code condition, tree value1, tree value2,
		int target_pc)
{
  tree target = lookup_label (target_pc);
  tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
  java_add_stmt
    (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
	     build1 (GOTO_EXPR, void_type_node, target),
	     build_java_empty_stmt ()));
}

/* Emit code for a TEST-type opcode: compare one popped operand
   against zero (or null, for references).  */

static void
expand_test (enum tree_code condition, tree type, int target_pc)
{
  tree value1, value2;
  flush_quick_stack ();
  value1 = pop_value (type);
  value2 = (type == ptr_type_node) ?
null_pointer_node : integer_zero_node; 1852 expand_compare (condition, value1, value2, target_pc); 1853} 1854 1855/* Emit code for a COND-type opcode. */ 1856 1857static void 1858expand_cond (enum tree_code condition, tree type, int target_pc) 1859{ 1860 tree value1, value2; 1861 flush_quick_stack (); 1862 /* note: pop values in opposite order */ 1863 value2 = pop_value (type); 1864 value1 = pop_value (type); 1865 /* Maybe should check value1 and value2 for type compatibility ??? */ 1866 expand_compare (condition, value1, value2, target_pc); 1867} 1868 1869static void 1870expand_java_goto (int target_pc) 1871{ 1872 tree target_label = lookup_label (target_pc); 1873 flush_quick_stack (); 1874 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label)); 1875} 1876 1877static tree 1878expand_java_switch (tree selector, int default_pc) 1879{ 1880 tree switch_expr, x; 1881 1882 flush_quick_stack (); 1883 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector, 1884 NULL_TREE, NULL_TREE); 1885 java_add_stmt (switch_expr); 1886 1887 x = build_case_label (NULL_TREE, NULL_TREE, 1888 create_artificial_label (input_location)); 1889 append_to_statement_list (x, &SWITCH_BODY (switch_expr)); 1890 1891 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc)); 1892 append_to_statement_list (x, &SWITCH_BODY (switch_expr)); 1893 1894 return switch_expr; 1895} 1896 1897static void 1898expand_java_add_case (tree switch_expr, int match, int target_pc) 1899{ 1900 tree value, x; 1901 1902 value = build_int_cst (TREE_TYPE (switch_expr), match); 1903 1904 x = build_case_label (value, NULL_TREE, 1905 create_artificial_label (input_location)); 1906 append_to_statement_list (x, &SWITCH_BODY (switch_expr)); 1907 1908 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc)); 1909 append_to_statement_list (x, &SWITCH_BODY (switch_expr)); 1910} 1911 1912static vec<tree, va_gc> * 1913pop_arguments (tree method_type) 1914{ 1915 function_args_iterator fnai; 1916 tree 
type; 1917 vec<tree, va_gc> *args = NULL; 1918 int arity; 1919 1920 FOREACH_FUNCTION_ARGS (method_type, type, fnai) 1921 { 1922 /* XXX: leaky abstraction. */ 1923 if (type == void_type_node) 1924 break; 1925 1926 vec_safe_push (args, type); 1927 } 1928 1929 arity = vec_safe_length (args); 1930 1931 while (arity--) 1932 { 1933 tree arg = pop_value ((*args)[arity]); 1934 1935 /* We simply cast each argument to its proper type. This is 1936 needed since we lose type information coming out of the 1937 verifier. We also have to do this when we pop an integer 1938 type that must be promoted for the function call. */ 1939 if (TREE_CODE (type) == POINTER_TYPE) 1940 arg = build1 (NOP_EXPR, type, arg); 1941 else if (targetm.calls.promote_prototypes (type) 1942 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node) 1943 && INTEGRAL_TYPE_P (type)) 1944 arg = convert (integer_type_node, arg); 1945 1946 (*args)[arity] = arg; 1947 } 1948 1949 return args; 1950} 1951 1952/* Attach to PTR (a block) the declaration found in ENTRY. */ 1953 1954int 1955attach_init_test_initialization_flags (treetreehash_entry **slot, tree block) 1956{ 1957 treetreehash_entry *ite = *slot; 1958 1959 if (block != error_mark_node) 1960 { 1961 if (TREE_CODE (block) == BIND_EXPR) 1962 { 1963 tree body = BIND_EXPR_BODY (block); 1964 DECL_CHAIN (ite->value) = BIND_EXPR_VARS (block); 1965 BIND_EXPR_VARS (block) = ite->value; 1966 body = build2 (COMPOUND_EXPR, void_type_node, 1967 build1 (DECL_EXPR, void_type_node, ite->value), body); 1968 BIND_EXPR_BODY (block) = body; 1969 } 1970 else 1971 { 1972 tree body = BLOCK_SUBBLOCKS (block); 1973 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block); 1974 BLOCK_EXPR_DECLS (block) = ite->value; 1975 body = build2 (COMPOUND_EXPR, void_type_node, 1976 build1 (DECL_EXPR, void_type_node, ite->value), body); 1977 BLOCK_SUBBLOCKS (block) = body; 1978 } 1979 1980 } 1981 return true; 1982} 1983 1984/* Build an expression to initialize the class CLAS. 
   if EXPR is non-NULL, returns an expression to first call the initializer
   (if it is needed) and then calls EXPR.  */

tree
build_class_init (tree clas, tree expr)
{
  tree init;

  /* An optimization: if CLAS is a superclass of the class we're
     compiling, we don't need to initialize it.  However, if CLAS is
     an interface, it won't necessarily be initialized, even if we
     implement it.  */
  if ((! CLASS_INTERFACE (TYPE_NAME (clas))
       && inherits_from_p (current_class, clas))
      || current_class == clas)
    return expr;

  if (always_initialize_class_p)
    {
      /* Unconditional call to _Jv_InitClass.  */
      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
    }
  else
    {
      /* Guard the call with a per-function, per-class boolean flag so
	 each class is initialized at most once per method body.  */
      tree *init_test_decl;
      tree decl;
      init_test_decl = java_treetreehash_new
	(DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);

      if (*init_test_decl == NULL)
	{
	  /* Build a declaration and mark it as a flag used to track
	     static class initializations.  */
	  decl = build_decl (input_location, VAR_DECL, NULL_TREE,
			     boolean_type_node);
	  MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
	  DECL_CONTEXT (decl) = current_function_decl;
	  DECL_INITIAL (decl) = boolean_false_node;
	  /* Don't emit any symbolic debugging info for this decl.  */
	  DECL_IGNORED_P (decl) = 1;
	  *init_test_decl = decl;
	}

      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
      init = build3 (COND_EXPR, void_type_node,
		     build2 (EQ_EXPR, boolean_type_node,
			     *init_test_decl, boolean_false_node),
		     init, integer_zero_node);
      TREE_SIDE_EFFECTS (init) = 1;
      /* NOTE(review): TREE_TYPE (expr) is read here even though EXPR
	 is only tested for NULL below — presumably callers on this
	 path always pass a non-null EXPR; verify.  */
      init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
		     build2 (MODIFY_EXPR, boolean_type_node,
			     *init_test_decl, boolean_true_node));
      TREE_SIDE_EFFECTS (init) = 1;
    }

  if (expr != NULL_TREE)
    {
      expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
      TREE_SIDE_EFFECTS (expr) = 1;
      return expr;
    }
  return init;
}



/* Rewrite expensive calls that require stack unwinding at runtime to
   cheaper alternatives.  The logic here performs these
   transformations:

   java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
   java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)

*/

/* One entry in the rewrite table: a (class, method, signature) triple
   to match, the replacement class and signature, access flags for a
   synthesized replacement method, and an optional hook that appends
   the extra argument(s).  */
typedef struct
{
  const char *classname;
  const char *method;
  const char *signature;
  const char *new_classname;
  const char *new_signature;
  int flags;
  void (*rewrite_arglist) (vec<tree, va_gc> **);
} rewrite_rule;

/* Add __builtin_return_address(0) to the end of an arglist.  */


static void
rewrite_arglist_getcaller (vec<tree, va_gc> **arglist)
{
  tree retaddr
    = build_call_expr (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS),
		       1, integer_zero_node);

  /* Inlining would change the return address this call observes.  */
  DECL_UNINLINABLE (current_function_decl) = 1;

  vec_safe_push (*arglist, retaddr);
}

/* Add this.class to the end of an arglist.
 */

static void
rewrite_arglist_getclass (vec<tree, va_gc> **arglist)
{
  vec_safe_push (*arglist, build_class_ref (output_class));
}

/* The rewrite table itself; terminated by an all-NULL sentinel
   entry.  */
static rewrite_rule rules[] =
  {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
    "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},

   {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
    "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},

   {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
    "()Ljava/lang/ClassLoader;",
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
    "java.lang.String", "([CII)Ljava/lang/String;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},

   {NULL, NULL, NULL, NULL, NULL, 0, NULL}};

/* True if this method is special, i.e. it's a private method that
   should be exported from a DSO.  */

bool
special_method_p (tree candidate_method)
{
  tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
  tree method = DECL_NAME (candidate_method);
  rewrite_rule *p;

  /* A method is "special" exactly when it appears as a match target
     in the rewrite table above (signature is not consulted here).  */
  for (p = rules; p->classname; p++)
    {
      if (get_identifier (p->classname) == context
	  && get_identifier (p->method) == method)
	return true;
    }
  return false;
}

/* Scan the rules list for replacements for *METHOD_P and replace the
   args accordingly.
   If the rewrite results in an access to a private
   method, update SPECIAL.  */

void
maybe_rewrite_invocation (tree *method_p, vec<tree, va_gc> **arg_list_p,
			  tree *method_signature_p, tree *special)
{
  tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
  rewrite_rule *p;
  *special = NULL_TREE;

  for (p = rules; p->classname; p++)
    {
      if (get_identifier (p->classname) == context)
	{
	  tree method = DECL_NAME (*method_p);
	  if (get_identifier (p->method) == method
	      && get_identifier (p->signature) == *method_signature_p)
	    {
	      tree maybe_method;
	      tree destination_class
		= lookup_class (get_identifier (p->new_classname));
	      gcc_assert (destination_class);
	      maybe_method
		= lookup_java_method (destination_class,
				      method,
				      get_identifier (p->new_signature));
	      if (! maybe_method && ! flag_verify_invocations)
		{
		  /* Replacement method not seen yet; create an
		     external declaration for it.  */
		  maybe_method
		    = add_method (destination_class, p->flags,
				  method, get_identifier (p->new_signature));
		  DECL_EXTERNAL (maybe_method) = 1;
		}
	      *method_p = maybe_method;
	      gcc_assert (*method_p);
	      if (p->rewrite_arglist)
		p->rewrite_arglist (arg_list_p);
	      *method_signature_p = get_identifier (p->new_signature);
	      /* Mark the call as targeting a "special" method (see
		 special_method_p).  */
	      *special = integer_one_node;

	      break;
	    }
	}
    }
}



/* Build a reference to the code of (statically known) METHOD: either
   its direct address, or, under indirect dispatch / BC compilation, a
   load from the atable, or a walk of the class's method table.
   SPECIAL is propagated into the symbol table entry.  */

tree
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
			tree self_type, tree method_signature ATTRIBUTE_UNUSED,
			vec<tree, va_gc> *arg_list ATTRIBUTE_UNUSED, tree special)
{
  tree func;
  if (is_compiled_class (self_type))
    {
      /* With indirect dispatch we have to use indirect calls for all
	 publicly visible methods or gcc will use PLT indirections
	 to reach them.  We also have to use indirect dispatch for all
	 external methods.  */
      if (! flag_indirect_dispatch
	  || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
	{
	  func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
			 method);
	}
      else
	{
	  /* Load the code pointer from this class's atable slot.  */
	  tree table_index
	    = build_int_cst (NULL_TREE,
			     (get_symbol_table_index
			      (method, special,
			       &TYPE_ATABLE_METHODS (output_class))));
	  func
	    = build4 (ARRAY_REF,
		      TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
		      TYPE_ATABLE_DECL (output_class), table_index,
		      NULL_TREE, NULL_TREE);
	}
      func = convert (method_ptr_type_node, func);
    }
  else
    {
      /* We don't know whether the method has been (statically) compiled.
	 Compile this code to get a reference to the method's code:

	 SELF_TYPE->methods[METHOD_INDEX].ncode

      */

      int method_index = 0;
      tree meth, ref;

      /* The method might actually be declared in some superclass, so
	 we have to use its class context, not the caller's notion of
	 where the method is.  */
      self_type = DECL_CONTEXT (method);
      ref = build_class_ref (self_type);
      ref = build1 (INDIRECT_REF, class_type_node, ref);
      if (ncode_ident == NULL_TREE)
	ncode_ident = get_identifier ("ncode");
      if (methods_ident == NULL_TREE)
	methods_ident = get_identifier ("methods");
      ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
		    lookup_field (&class_type_node, methods_ident),
		    NULL_TREE);
      /* Linear scan of TYPE_METHODS to find METHOD's index; aborts
	 compilation if METHOD is not on the chain.  */
      for (meth = TYPE_METHODS (self_type);
	   ; meth = DECL_CHAIN (meth))
	{
	  if (method == meth)
	    break;
	  if (meth == NULL_TREE)
	    fatal_error (input_location, "method '%s' not found in class",
			 IDENTIFIER_POINTER (DECL_NAME (method)));
	  method_index++;
	}
      method_index *= int_size_in_bytes (method_type_node);
      ref = fold_build_pointer_plus_hwi (ref, method_index);
      ref = build1 (INDIRECT_REF, method_type_node, ref);
      func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
		     ref, lookup_field (&method_type_node, ncode_ident),
		     NULL_TREE);
    }
  return func;
}

/* Wrap the receiver (first element of ARG_LIST) in a SAVE_EXPR and
   return an expression for its vtable pointer.  For an interface call
   on an array receiver, use java.lang.Object's vtable instead.  */

tree
invoke_build_dtable (int is_invoke_interface, vec<tree, va_gc> *arg_list)
{
  tree dtable, objectref;
  tree saved = save_expr ((*arg_list)[0]);

  (*arg_list)[0] = saved;

  /* If we're dealing with interfaces and if the objectref
     argument is an array then get the dispatch table of the class
     Object rather than the one from the objectref.  */
  objectref = (is_invoke_interface
	       && is_array_type_p (TREE_TYPE (saved))
	       ? build_class_ref (object_type_node) : saved);

  if (dtable_ident == NULL_TREE)
    dtable_ident = get_identifier ("vtable");
  dtable = build_java_indirect_ref (object_type_node, objectref,
				    flag_check_references);
  dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
		   lookup_field (&object_type_node, dtable_ident), NULL_TREE);

  return dtable;
}

/* Determine the index in SYMBOL_TABLE for a reference to the decl
   T.  If this decl has not been seen before, it will be added to the
   [oa]table_methods.  If it has, the existing table slot will be
   reused.  */

int
get_symbol_table_index (tree t, tree special,
			vec<method_entry, va_gc> **symbol_table)
{
  method_entry *e;
  unsigned i;
  method_entry elem = {t, special};

  FOR_EACH_VEC_SAFE_ELT (*symbol_table, i, e)
    if (t == e->method && special == e->special)
      goto done;

  /* Not found: I now equals the old length, so I is the index of the
     newly pushed entry.  */
  vec_safe_push (*symbol_table, elem);

 done:
  /* Table indices are 1-based.  */
  return i + 1;
}

/* Build an expression that computes the code address for a virtual
   call to METHOD through vtable pointer DTABLE.  */

tree
build_invokevirtual (tree dtable, tree method, tree special)
{
  tree func;
  tree nativecode_ptr_ptr_type_node
    = build_pointer_type (nativecode_ptr_type_node);
  tree method_index;
  tree otable_index;

  if (flag_indirect_dispatch)
    {
      gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));

      /* The vtable offset comes from the otable at runtime.  */
      otable_index
	= build_int_cst (NULL_TREE, get_symbol_table_index
			 (method, special,
			  &TYPE_OTABLE_METHODS (output_class)));
      method_index = build4 (ARRAY_REF, integer_type_node,
			     TYPE_OTABLE_DECL (output_class),
			     otable_index, NULL_TREE, NULL_TREE);
    }
  else
    {
      /* We fetch the DECL_VINDEX field directly here, rather than
	 using get_method_index().  DECL_VINDEX is the true offset
	 from the vtable base to a method, regardless of any extra
	 words inserted at the start of the vtable.  */
      method_index = DECL_VINDEX (method);
      method_index = size_binop (MULT_EXPR, method_index,
				 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
      if (TARGET_VTABLE_USES_DESCRIPTORS)
	method_index = size_binop (MULT_EXPR, method_index,
				   size_int (TARGET_VTABLE_USES_DESCRIPTORS));
    }

  func = fold_build_pointer_plus (dtable, method_index);

  if (TARGET_VTABLE_USES_DESCRIPTORS)
    /* The slot itself is the (descriptor) code address.  */
    func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
  else
    {
      /* Otherwise load the code pointer out of the vtable slot.  */
      func = fold_convert (nativecode_ptr_ptr_type_node, func);
      func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
    }

  return func;
}

static GTY(()) tree class_ident;

/* Build an expression that resolves METHOD (an interface method)
   against the runtime class found through DTABLE, via
   _Jv_LookupInterfaceMethodIdx.  */
tree
build_invokeinterface (tree dtable, tree method)
{
  tree interface;
  tree idx;

  /* We expand invokeinterface here.  */

  if (class_ident == NULL_TREE)
    class_ident = get_identifier ("class");

  /* Fetch the receiver's Class object from its vtable.  */
  dtable = build_java_indirect_ref (dtable_type, dtable,
				    flag_check_references);
  dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
		   lookup_field (&dtable_type, class_ident), NULL_TREE);

  interface = DECL_CONTEXT (method);
  gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
  layout_class_methods (interface);

  if (flag_indirect_dispatch)
    {
      /* itable entries come in (interface, index) pairs, hence the
	 factor of two; the pair occupies slots ITABLE_INDEX-1 and
	 ITABLE_INDEX.  */
      int itable_index
	= 2 * (get_symbol_table_index
	       (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
      interface
	= build4 (ARRAY_REF,
		  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
		  TYPE_ITABLE_DECL (output_class),
		  build_int_cst (NULL_TREE, itable_index-1),
		  NULL_TREE, NULL_TREE);
      idx
	= build4 (ARRAY_REF,
		  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
		  TYPE_ITABLE_DECL (output_class),
		  build_int_cst (NULL_TREE, itable_index),
		  NULL_TREE, NULL_TREE);
      interface = convert (class_ptr_type, interface);
      idx = convert (integer_type_node, idx);
    }
  else
    {
      idx = build_int_cst (NULL_TREE,
			   get_interface_method_index (method, interface));
      interface = build_class_ref (interface);
    }

  return build_call_nary (ptr_type_node,
			  build_address_of (soft_lookupinterfacemethod_node),
			  3, dtable, interface, idx);
}

/* Expand one of the invoke_* opcodes.
   OPCODE is the specific opcode.
   METHOD_REF_INDEX is an index into the constant pool.
   NARGS is the number of arguments, or -1 if not specified.
*/ 2423 2424static void 2425expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) 2426{ 2427 tree method_signature 2428 = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index); 2429 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, 2430 method_ref_index); 2431 tree self_type 2432 = get_class_constant (current_jcf, 2433 COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool, 2434 method_ref_index)); 2435 const char *const self_name 2436 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type))); 2437 tree call, func, method, method_type; 2438 vec<tree, va_gc> *arg_list; 2439 tree check = NULL_TREE; 2440 2441 tree special = NULL_TREE; 2442 2443 if (! CLASS_LOADED_P (self_type)) 2444 { 2445 load_class (self_type, 1); 2446 safe_layout_class (self_type); 2447 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK) 2448 fatal_error (input_location, "failed to find class '%s'", self_name); 2449 } 2450 layout_class_methods (self_type); 2451 2452 if (ID_INIT_P (method_name)) 2453 method = lookup_java_constructor (self_type, method_signature); 2454 else 2455 method = lookup_java_method (self_type, method_name, method_signature); 2456 2457 /* We've found a method in a class other than the one in which it 2458 was wanted. This can happen if, for instance, we're trying to 2459 compile invokespecial super.equals(). 2460 FIXME: This is a kludge. Rather than nullifying the result, we 2461 should change lookup_java_method() so that it doesn't search the 2462 superclass chain when we're BC-compiling. */ 2463 if (! flag_verify_invocations 2464 && method 2465 && ! TYPE_ARRAY_P (self_type) 2466 && self_type != DECL_CONTEXT (method)) 2467 method = NULL_TREE; 2468 2469 /* We've found a method in an interface, but this isn't an interface 2470 call. 
*/ 2471 if (opcode != OPCODE_invokeinterface 2472 && method 2473 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))) 2474 method = NULL_TREE; 2475 2476 /* We've found a non-interface method but we are making an 2477 interface call. This can happen if the interface overrides a 2478 method in Object. */ 2479 if (! flag_verify_invocations 2480 && opcode == OPCODE_invokeinterface 2481 && method 2482 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))) 2483 method = NULL_TREE; 2484 2485 if (method == NULL_TREE) 2486 { 2487 if (flag_verify_invocations || ! flag_indirect_dispatch) 2488 { 2489 error ("class '%s' has no method named '%s' matching signature '%s'", 2490 self_name, 2491 IDENTIFIER_POINTER (method_name), 2492 IDENTIFIER_POINTER (method_signature)); 2493 } 2494 else 2495 { 2496 int flags = ACC_PUBLIC; 2497 if (opcode == OPCODE_invokestatic) 2498 flags |= ACC_STATIC; 2499 if (opcode == OPCODE_invokeinterface) 2500 { 2501 flags |= ACC_INTERFACE | ACC_ABSTRACT; 2502 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1; 2503 } 2504 method = add_method (self_type, flags, method_name, 2505 method_signature); 2506 DECL_ARTIFICIAL (method) = 1; 2507 METHOD_DUMMY (method) = 1; 2508 layout_class_method (self_type, NULL, 2509 method, NULL); 2510 } 2511 } 2512 2513 /* Invoke static can't invoke static/abstract method */ 2514 if (method != NULL_TREE) 2515 { 2516 if (opcode == OPCODE_invokestatic) 2517 { 2518 if (!METHOD_STATIC (method)) 2519 { 2520 error ("invokestatic on non static method"); 2521 method = NULL_TREE; 2522 } 2523 else if (METHOD_ABSTRACT (method)) 2524 { 2525 error ("invokestatic on abstract method"); 2526 method = NULL_TREE; 2527 } 2528 } 2529 else 2530 { 2531 if (METHOD_STATIC (method)) 2532 { 2533 error ("invoke[non-static] on static method"); 2534 method = NULL_TREE; 2535 } 2536 } 2537 } 2538 2539 if (method == NULL_TREE) 2540 { 2541 /* If we got here, we emitted an error message above. 
So we 2542 just pop the arguments, push a properly-typed zero, and 2543 continue. */ 2544 method_type = get_type_from_signature (method_signature); 2545 pop_arguments (method_type); 2546 if (opcode != OPCODE_invokestatic) 2547 pop_type (self_type); 2548 method_type = promote_type (TREE_TYPE (method_type)); 2549 push_value (convert (method_type, integer_zero_node)); 2550 return; 2551 } 2552 2553 arg_list = pop_arguments (TREE_TYPE (method)); 2554 flush_quick_stack (); 2555 2556 maybe_rewrite_invocation (&method, &arg_list, &method_signature, 2557 &special); 2558 method_type = TREE_TYPE (method); 2559 2560 func = NULL_TREE; 2561 if (opcode == OPCODE_invokestatic) 2562 func = build_known_method_ref (method, method_type, self_type, 2563 method_signature, arg_list, special); 2564 else if (opcode == OPCODE_invokespecial 2565 || (opcode == OPCODE_invokevirtual 2566 && (METHOD_PRIVATE (method) 2567 || METHOD_FINAL (method) 2568 || CLASS_FINAL (TYPE_NAME (self_type))))) 2569 { 2570 /* If the object for the method call is null, we throw an 2571 exception. We don't do this if the object is the current 2572 method's `this'. In other cases we just rely on an 2573 optimization pass to eliminate redundant checks. FIXME: 2574 Unfortunately there doesn't seem to be a way to determine 2575 what the current method is right now. 2576 We do omit the check if we're calling <init>. */ 2577 /* We use a SAVE_EXPR here to make sure we only evaluate 2578 the new `self' expression once. */ 2579 tree save_arg = save_expr ((*arg_list)[0]); 2580 (*arg_list)[0] = save_arg; 2581 check = java_check_reference (save_arg, ! 
DECL_INIT_P (method)); 2582 func = build_known_method_ref (method, method_type, self_type, 2583 method_signature, arg_list, special); 2584 } 2585 else 2586 { 2587 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface, 2588 arg_list); 2589 if (opcode == OPCODE_invokevirtual) 2590 func = build_invokevirtual (dtable, method, special); 2591 else 2592 func = build_invokeinterface (dtable, method); 2593 } 2594 2595 if (TREE_CODE (func) == ADDR_EXPR) 2596 TREE_TYPE (func) = build_pointer_type (method_type); 2597 else 2598 func = build1 (NOP_EXPR, build_pointer_type (method_type), func); 2599 2600 call = build_call_vec (TREE_TYPE (method_type), func, arg_list); 2601 TREE_SIDE_EFFECTS (call) = 1; 2602 call = check_for_builtin (method, call); 2603 2604 if (check != NULL_TREE) 2605 { 2606 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call); 2607 TREE_SIDE_EFFECTS (call) = 1; 2608 } 2609 2610 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE) 2611 java_add_stmt (call); 2612 else 2613 { 2614 push_value (call); 2615 flush_quick_stack (); 2616 } 2617} 2618 2619/* Create a stub which will be put into the vtable but which will call 2620 a JNI function. 
 */

tree
build_jni_stub (tree method)
{
  tree jnifunc, call, body, method_sig, arg_types;
  tree jniarg0, jniarg1, jniarg2, jniarg3;
  tree jni_func_type, tem;
  tree env_var, res_var = NULL_TREE, block;
  tree method_args;
  tree meth_var;
  tree bind;
  vec<tree, va_gc> *args = NULL;
  int args_size = 0;

  tree klass = DECL_CONTEXT (method);
  klass = build_class_ref (klass);

  gcc_assert (METHOD_NATIVE (method) && flag_jni);

  DECL_ARTIFICIAL (method) = 1;
  DECL_EXTERNAL (method) = 0;

  /* Local holding the JNIEnv* for this invocation.  */
  env_var = build_decl (input_location,
			VAR_DECL, get_identifier ("env"), ptr_type_node);
  DECL_CONTEXT (env_var) = method;

  /* For non-void methods, a local to capture the JNI call's result.  */
  if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
    {
      res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
			    TREE_TYPE (TREE_TYPE (method)));
      DECL_CONTEXT (res_var) = method;
      DECL_CHAIN (env_var) = res_var;
    }

  method_args = DECL_ARGUMENTS (method);
  block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
  TREE_SIDE_EFFECTS (block) = 1;

  /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame.  */
  body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
		 build_call_nary (ptr_type_node,
				  build_address_of (soft_getjnienvnewframe_node),
				  1, klass));

  /* The JNIEnv structure is the first argument to the JNI function.  */
  args_size += int_size_in_bytes (TREE_TYPE (env_var));
  vec_safe_push (args, env_var);

  /* For a static method the second argument is the class.  For a
     non-static method the second argument is `this'; that is already
     available in the argument list.  */
  if (METHOD_STATIC (method))
    {
      args_size += int_size_in_bytes (TREE_TYPE (klass));
      vec_safe_push (args, klass);
    }

  /* All the arguments to this method become arguments to the
     underlying JNI function.  If we had to wrap object arguments in a
     special way, we would do that here.  */
  for (tem = method_args; tem != NULL_TREE; tem = DECL_CHAIN (tem))
    {
      int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
#ifdef PARM_BOUNDARY
      /* Round each argument's size up to the parameter-passing
	 boundary; ARGS_SIZE feeds the JNI lookup's stack-size hint.  */
      arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
		  * PARM_BOUNDARY);
#endif
      args_size += (arg_bits / BITS_PER_UNIT);

      vec_safe_push (args, tem);
    }
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));

  /* Argument types for static methods and the JNIEnv structure.
     FIXME: Write and use build_function_type_vec to avoid this.  */
  if (METHOD_STATIC (method))
    arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
  arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);

  /* We call _Jv_LookupJNIMethod to find the actual underlying
     function pointer.  _Jv_LookupJNIMethod will throw the appropriate
     exception if this function is not found at runtime.  */
  method_sig = build_java_signature (TREE_TYPE (method));
  jniarg0 = klass;
  jniarg1 = build_utf8_ref (DECL_NAME (method));
  jniarg2 = build_utf8_ref (unmangle_classname
			    (IDENTIFIER_POINTER (method_sig),
			     IDENTIFIER_LENGTH (method_sig)));
  jniarg3 = build_int_cst (NULL_TREE, args_size);

  tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);

#ifdef MODIFY_JNI_METHOD_CALL
  tem = MODIFY_JNI_METHOD_CALL (tem);
#endif

  jni_func_type = build_pointer_type (tem);

  /* Use the actual function type, rather than a generic pointer type,
     such that this decl keeps the actual pointer type from being
     garbage-collected.  If it is, we end up using canonical types
     with different uids for equivalent function types, and this in
     turn causes utf8 identifiers and output order to vary.  */
  meth_var = build_decl (input_location,
			 VAR_DECL, get_identifier ("meth"), jni_func_type);
  TREE_STATIC (meth_var) = 1;
  TREE_PUBLIC (meth_var) = 0;
  DECL_EXTERNAL (meth_var) = 0;
  DECL_CONTEXT (meth_var) = method;
  DECL_ARTIFICIAL (meth_var) = 1;
  DECL_INITIAL (meth_var) = null_pointer_node;
  TREE_USED (meth_var) = 1;
  chainon (env_var, meth_var);
  build_result_decl (method);

  /* Resolve the target lazily:
     meth != NULL ? meth : (meth = _Jv_LookupJNIMethod (...)).  */
  jnifunc = build3 (COND_EXPR, jni_func_type,
		    build2 (NE_EXPR, boolean_type_node,
			    meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
		    meth_var,
		    build2 (MODIFY_EXPR, jni_func_type, meth_var,
			    build1
			    (NOP_EXPR, jni_func_type,
			     build_call_nary (ptr_type_node,
					      build_address_of
					      (soft_lookupjnimethod_node),
					      4,
					      jniarg0, jniarg1,
					      jniarg2, jniarg3))));

  /* Now we make the actual JNI call via the resulting function
     pointer.  */
  call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);

  /* If the JNI call returned a result, capture it here.  If we had to
     unwrap JNI object results, we would do that here.  */
  if (res_var != NULL_TREE)
    {
      /* If the call returns an object, it may return a JNI weak
	 reference, in which case we must unwrap it.  */
      if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
	call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
				build_address_of (soft_unwrapjni_node),
				1, call);
      call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
		     res_var, call);
    }

  TREE_SIDE_EFFECTS (call) = 1;

  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Now free the environment we allocated.  */
  call = build_call_nary (ptr_type_node,
			  build_address_of (soft_jnipopsystemframe_node),
			  1, env_var);
  TREE_SIDE_EFFECTS (call) = 1;
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Finally, do the return.  */
  if (res_var != NULL_TREE)
    {
      tree drt;
      gcc_assert (DECL_RESULT (method));
      /* Make sure we copy the result variable to the actual
	 result.  We use the type of the DECL_RESULT because it
	 might be different from the return type of the function:
	 it might be promoted.  */
      drt = TREE_TYPE (DECL_RESULT (method));
      if (drt != TREE_TYPE (res_var))
	res_var = build1 (CONVERT_EXPR, drt, res_var);
      res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
      TREE_SIDE_EFFECTS (res_var) = 1;
    }

  body = build2 (COMPOUND_EXPR, void_type_node, body,
		 build1 (RETURN_EXPR, void_type_node, res_var));
  TREE_SIDE_EFFECTS (body) = 1;

  /* Prepend class initialization for static methods reachable from
     other classes.  */
  if (METHOD_STATIC (method)
      && (! METHOD_PRIVATE (method)
	  || INNER_CLASS_P (DECL_CONTEXT (method))))
    {
      tree init = build_call_expr (soft_initclass_node, 1,
				   klass);
      body = build2 (COMPOUND_EXPR, void_type_node, init, body);
      TREE_SIDE_EFFECTS (body) = 1;
    }

  bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
		 body, block);
  return bind;
}


/* Given lvalue EXP, return a volatile expression that references the
   same object.
*/ 2821 2822tree 2823java_modify_addr_for_volatile (tree exp) 2824{ 2825 tree exp_type = TREE_TYPE (exp); 2826 tree v_type 2827 = build_qualified_type (exp_type, 2828 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE); 2829 tree addr = build_fold_addr_expr (exp); 2830 v_type = build_pointer_type (v_type); 2831 addr = fold_convert (v_type, addr); 2832 exp = build_fold_indirect_ref (addr); 2833 return exp; 2834} 2835 2836 2837/* Expand an operation to extract from or store into a field. 2838 IS_STATIC is 1 iff the field is static. 2839 IS_PUTTING is 1 for putting into a field; 0 for getting from the field. 2840 FIELD_REF_INDEX is an index into the constant pool. */ 2841 2842static void 2843expand_java_field_op (int is_static, int is_putting, int field_ref_index) 2844{ 2845 tree self_type 2846 = get_class_constant (current_jcf, 2847 COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool, 2848 field_ref_index)); 2849 const char *self_name 2850 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type))); 2851 tree field_name = COMPONENT_REF_NAME (¤t_jcf->cpool, field_ref_index); 2852 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, 2853 field_ref_index); 2854 tree field_type = get_type_from_signature (field_signature); 2855 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE; 2856 tree field_ref; 2857 int is_error = 0; 2858 tree original_self_type = self_type; 2859 tree field_decl; 2860 tree modify_expr; 2861 2862 if (! CLASS_LOADED_P (self_type)) 2863 load_class (self_type, 1); 2864 field_decl = lookup_field (&self_type, field_name); 2865 if (field_decl == error_mark_node) 2866 { 2867 is_error = 1; 2868 } 2869 else if (field_decl == NULL_TREE) 2870 { 2871 if (! 
flag_verify_invocations) 2872 { 2873 int flags = ACC_PUBLIC; 2874 if (is_static) 2875 flags |= ACC_STATIC; 2876 self_type = original_self_type; 2877 field_decl = add_field (original_self_type, field_name, 2878 field_type, flags); 2879 DECL_ARTIFICIAL (field_decl) = 1; 2880 DECL_IGNORED_P (field_decl) = 1; 2881#if 0 2882 /* FIXME: We should be pessimistic about volatility. We 2883 don't know one way or another, but this is safe. 2884 However, doing this has bad effects on code quality. We 2885 need to look at better ways to do this. */ 2886 TREE_THIS_VOLATILE (field_decl) = 1; 2887#endif 2888 } 2889 else 2890 { 2891 error ("missing field '%s' in '%s'", 2892 IDENTIFIER_POINTER (field_name), self_name); 2893 is_error = 1; 2894 } 2895 } 2896 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature) 2897 { 2898 error ("mismatching signature for field '%s' in '%s'", 2899 IDENTIFIER_POINTER (field_name), self_name); 2900 is_error = 1; 2901 } 2902 field_ref = is_static ? NULL_TREE : pop_value (self_type); 2903 if (is_error) 2904 { 2905 if (! is_putting) 2906 push_value (convert (field_type, integer_zero_node)); 2907 flush_quick_stack (); 2908 return; 2909 } 2910 2911 field_ref = build_field_ref (field_ref, self_type, field_name); 2912 if (is_static 2913 && ! flag_indirect_dispatch) 2914 { 2915 tree context = DECL_CONTEXT (field_ref); 2916 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context))) 2917 field_ref = build_class_init (context, field_ref); 2918 else 2919 field_ref = build_class_init (self_type, field_ref); 2920 } 2921 if (is_putting) 2922 { 2923 flush_quick_stack (); 2924 if (FIELD_FINAL (field_decl)) 2925 { 2926 if (DECL_CONTEXT (field_decl) != current_class) 2927 error ("assignment to final field %q+D not in field%'s class", 2928 field_decl); 2929 /* We used to check for assignments to final fields not 2930 occurring in the class initializer or in a constructor 2931 here. 
However, this constraint doesn't seem to be 2932 enforced by the JVM. */ 2933 } 2934 2935 if (TREE_THIS_VOLATILE (field_decl)) 2936 field_ref = java_modify_addr_for_volatile (field_ref); 2937 2938 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), 2939 field_ref, new_value); 2940 2941 if (TREE_THIS_VOLATILE (field_decl)) 2942 { 2943 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE); 2944 java_add_stmt (build_call_expr (sync, 0)); 2945 } 2946 2947 java_add_stmt (modify_expr); 2948 } 2949 else 2950 { 2951 tree temp = build_decl (input_location, 2952 VAR_DECL, NULL_TREE, TREE_TYPE (field_ref)); 2953 java_add_local_var (temp); 2954 2955 if (TREE_THIS_VOLATILE (field_decl)) 2956 field_ref = java_modify_addr_for_volatile (field_ref); 2957 2958 modify_expr 2959 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref); 2960 java_add_stmt (modify_expr); 2961 2962 if (TREE_THIS_VOLATILE (field_decl)) 2963 { 2964 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE); 2965 java_add_stmt (build_call_expr (sync, 0)); 2966 } 2967 2968 push_value (temp); 2969 } 2970 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl); 2971} 2972 2973static void 2974load_type_state (int pc) 2975{ 2976 int i; 2977 tree vec = (*type_states)[pc]; 2978 int cur_length = TREE_VEC_LENGTH (vec); 2979 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl); 2980 for (i = 0; i < cur_length; i++) 2981 type_map [i] = TREE_VEC_ELT (vec, i); 2982} 2983 2984/* Go over METHOD's bytecode and note instruction starts in 2985 instruction_bits[]. 
   */

/* Pass 1 over the bytecode of METHOD: record in the global
   instruction_bits array which PCs start an instruction
   (BCODE_INSTRUCTION_START) and which PCs can be jump targets (via
   note_label, reached through the NOTE_LABEL macro below).  Also
   (re)allocates instruction_bits and type_states to the method's code
   length.  The actual tree expansion happens later, in
   expand_byte_code / process_jvm_instruction.

   The per-opcode decoding is driven by javaop.def: the JAVAOP macro
   expands each opcode into a switch case whose body is one of the
   PRE_* macros defined below.  The IMMEDIATE_* macros (from javaop.h)
   read operand bytes and advance PC as a side effect, so they must be
   evaluated exactly once per operand and in bytecode order.  */

void
note_instructions (JCF *jcf, tree method)
{
  int PC;
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);

  int saw_index;	/* Set by CONST_INDEX_*/VAR_INDEX_* when an operand
			   is a pool/variable index, not a PC offset.  */
  jint INT_temp;

#undef RET /* Defined by config/i386/i386.h */
#undef PTR
#define BCODE	byte_ops
#define BYTE_type_node byte_type_node
#define SHORT_type_node short_type_node
#define INT_type_node int_type_node
#define LONG_type_node long_type_node
#define CHAR_type_node char_type_node
#define PTR_type_node ptr_type_node
#define FLOAT_type_node float_type_node
#define DOUBLE_type_node double_type_node
#define VOID_type_node void_type_node
  /* Index operands: note that an index (not an offset) was read, so
     PRE_BRANCH/PRE_JSR know not to treat the value as a label.  */
#define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
#define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)

#define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */

  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;
  instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
  memset (instruction_bits, 0, length + 1);
  vec_alloc (type_states, length + 1);
  type_states->quick_grow_cleared (length + 1);

  /* This pass figures out which PC can be the targets of jumps. */
  for (PC = 0; PC < length;)
    {
      int oldpc = PC; /* PC at instruction start. */
      instruction_bits [PC] |= BCODE_INSTRUCTION_START;
      switch (byte_ops[PC++])
	{
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
        case OPCODE: \
	  PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
	  break;

#define NOTE_LABEL(PC) note_label(oldpc, PC)

  /* Most opcode kinds only need their operand bytes consumed here;
     the (void) casts evaluate OPERAND_VALUE for its PC side effects.  */
#define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
#define PRE_SPECIAL_IINC(OPERAND_TYPE) \
  ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
#define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
#define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
#define PRE_SPECIAL_THROW(IGNORE) /* nothing */
#define PRE_SPECIAL_BREAK(IGNORE) /* nothing */

/* two forms of wide instructions */
#define PRE_SPECIAL_WIDE(IGNORE) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    if (modified_opcode == OPCODE_iinc)	\
      { \
	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
	(void) IMMEDIATE_s2;	/* constbyte1 and constbyte2 */ \
      } \
    else \
      { \
	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
      } \
  }

#define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */

#define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
	  PRE_ARRAY_##SUBOP(OPERAND_TYPE)
#define PRE_ARRAY_LOAD(TYPE) /* nothing */
#define PRE_ARRAY_STORE(TYPE) /* nothing */
#define PRE_ARRAY_LENGTH(TYPE) /* nothing */
#define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
#define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
#define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
#define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)

  /* Branch-like opcodes: the signed 16-bit operand is a PC-relative
     offset, so record the target label now.  */
#define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
#define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  NOTE_LABEL (PC); /* The return point of the subroutine is a target too. */ \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);

#define PRE_RET(OPERAND_TYPE, OPERAND_VALUE)  (void)(OPERAND_VALUE)

  /* tableswitch/lookupswitch: operands start at the next 4-byte
     boundary relative to the start of the code.  */
#define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH

#define PRE_LOOKUP_SWITCH						\
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (npairs >= 0) \
      while (--npairs >= 0) { \
       jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_TABLE_SWITCH				\
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (low <= high) \
      while (low++ <= high) { \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  (void)(IMMEDIATE_u2); \
  PC += 2 * IS_INTERFACE /* for invokeinterface */;

#include "javaop.def"
#undef JAVAOP
	}
    } /* for */
}
/* Pass 2: translate the bytecode of METHOD into GENERIC trees.
   Requires note_instructions to have filled instruction_bits for this
   method.  This function:
     - pre-scans the LineNumberTable to flag PCs that carry line info;
     - runs the bytecode verifier and bails out if it fails;
     - emits a LABEL_EXPR at every branch target and reloads the
       verifier's type map there;
     - overwrites unverified (unreachable) bytecodes with nops so EH
       ranges still line up, warning under -Wextra;
     - sets input_location from the line number table as it goes;
     - hands every instruction to process_jvm_instruction.  */

void
expand_byte_code (JCF *jcf, tree method)
{
  int PC;
  int i;
  const unsigned char *linenumber_pointer;
  int dead_code_index = -1;	/* Start PC of the current dead region,
				   or -1 when not inside one.  */
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);
  location_t max_location = input_location;

  stack_pointer = 0;
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;

  /* We make an initial pass of the line number table, to note
     which instructions have associated line number entries.  */
  linenumber_pointer = linenumber_table;
  for (i = 0; i < linenumber_count; i++)
    {
      int pc = GET_u2 (linenumber_pointer);
      linenumber_pointer += 4;	/* Each entry is <u2 pc, u2 line>.  */
      if (pc >= length)
	warning (0, "invalid PC in line number table");
      else
	{
	  if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
	    instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
	  instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
	}
    }

  if (! verify_jvm_instructions_new (jcf, byte_ops, length))
    return;

  promote_arguments ();
  cache_this_class_ref (method);
  cache_cpool_data_ref ();

  /* Translate bytecodes.  */
  linenumber_pointer = linenumber_table;
  for (PC = 0; PC < length;)
    {
      if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
	{
	  tree label = lookup_label (PC);
          flush_quick_stack ();
	  if ((instruction_bits [PC] & BCODE_TARGET) != 0)
	    java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
	  /* Restore the verifier's notion of the stack/local types at
	     this merge point.  */
	  if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
	    load_type_state (PC);
	}

      if (! (instruction_bits [PC] & BCODE_VERIFIED))
	{
	  if (dead_code_index == -1)
	    {
	      /* This is the start of a region of unreachable bytecodes.
                 They still need to be processed in order for EH ranges
                 to get handled correctly.  However, we can simply
                 replace these bytecodes with nops.  */
	      dead_code_index = PC;
            }

          /* Turn this bytecode into a nop.  */
          byte_ops[PC] = 0x0;
        }
      else
        {
	  if (dead_code_index != -1)
	    {
              /* We've just reached the end of a region of dead code.  */
	      if (extra_warnings)
		warning (0, "unreachable bytecode from %d to before %d",
			 dead_code_index, PC);
              dead_code_index = -1;
            }
	}

      /* Handle possible line number entry for this PC.

	 This code handles out-of-order and multiple linenumbers per PC,
	 but is optimized for the case of line numbers increasing
	 monotonically with PC. */
      if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
	{
	  if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
	      || GET_u2 (linenumber_pointer) != PC)
	    linenumber_pointer = linenumber_table;	/* Rescan from start.  */
	  while (linenumber_pointer < linenumber_table + linenumber_count * 4)
	    {
	      int pc = GET_u2 (linenumber_pointer);
	      linenumber_pointer += 4;
	      if (pc == PC)
		{
		  int line = GET_u2 (linenumber_pointer - 2);
		  input_location = linemap_line_start (line_table, line, 1);
		  if (input_location > max_location)
		    max_location = input_location;
		  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
		    break;
		}
	    }
	}
      maybe_pushlevels (PC);
      PC = process_jvm_instruction (PC, byte_ops, length);
      maybe_poplevels (PC);
    } /* for */

  uncache_this_class_ref (method);

  /* Dead code may run to the very end of the method; warn about the
     open region, mirroring the in-loop case above.  */
  if (dead_code_index != -1)
    {
      /* We've just reached the end of a region of dead code.  */
      if (extra_warnings)
	warning (0, "unreachable bytecode from %d to the end of the method",
		 dead_code_index);
    }

  DECL_FUNCTION_LAST_LINE (method) = max_location;
}
*/ 3246 if (extra_warnings) 3247 warning (0, "unreachable bytecode from %d to the end of the method", 3248 dead_code_index); 3249 } 3250 3251 DECL_FUNCTION_LAST_LINE (method) = max_location; 3252} 3253 3254static void 3255java_push_constant_from_pool (JCF *jcf, int index) 3256{ 3257 tree c; 3258 if (JPOOL_TAG (jcf, index) == CONSTANT_String) 3259 { 3260 tree name; 3261 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index)); 3262 index = alloc_name_constant (CONSTANT_String, name); 3263 c = build_ref_from_constant_pool (index); 3264 c = convert (promote_type (string_type_node), c); 3265 } 3266 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class 3267 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass) 3268 { 3269 tree record = get_class_constant (jcf, index); 3270 c = build_class_ref (record); 3271 } 3272 else 3273 c = get_constant (jcf, index); 3274 push_value (c); 3275} 3276 3277int 3278process_jvm_instruction (int PC, const unsigned char* byte_ops, 3279 long length ATTRIBUTE_UNUSED) 3280{ 3281 const char *opname; /* Temporary ??? */ 3282 int oldpc = PC; /* PC at instruction start. */ 3283 3284 /* If the instruction is at the beginning of an exception handler, 3285 replace the top of the stack with the thrown object reference. */ 3286 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET) 3287 { 3288 /* Note that the verifier will not emit a type map at all for 3289 dead exception handlers. In this case we just ignore the 3290 situation. 
/* Expand the single bytecode instruction at PC in BYTE_OPS into
   GENERIC trees and return the PC of the following instruction.
   The per-opcode behavior comes from javaop.def, expanded through the
   JAVAOP macro and the per-kind macros defined below; as in
   note_instructions, the IMMEDIATE_* macros consume operand bytes and
   advance PC as a side effect, so evaluation order matters.  */

int
process_jvm_instruction (int PC, const unsigned char* byte_ops,
			 long length ATTRIBUTE_UNUSED)
{
  const char *opname; /* Temporary ??? */
  int oldpc = PC; /* PC at instruction start. */

  /* If the instruction is at the beginning of an exception handler,
     replace the top of the stack with the thrown object reference.  */
  if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
    {
      /* Note that the verifier will not emit a type map at all for
	 dead exception handlers.  In this case we just ignore the
	 situation.  */
      if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
	{
	  tree type = pop_type (promote_type (throwable_type_node));
	  push_value (build_exception_object_ref (type));
	}
    }

  switch (byte_ops[PC++])
    {
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
    case OPCODE: \
      opname = #OPNAME; \
      OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
      break;

#define RET(OPERAND_TYPE, OPERAND_VALUE) 				\
  {									\
    int saw_index = 0;							\
    int index     = OPERAND_VALUE;					\
    (void) saw_index;  /* Avoid set but not used warning.  */		\
    build_java_ret							\
      (find_local_variable (index, return_address_type_node, oldpc));	\
  }

#define JSR(OPERAND_TYPE, OPERAND_VALUE) \
  {						    \
    /* OPERAND_VALUE may have side-effects on PC */ \
    int opvalue = OPERAND_VALUE;		    \
    build_java_jsr (oldpc + opvalue, PC);	    \
  }

/* Push a constant onto the stack.  saw_index is set by the
   CONST_INDEX_* operand macros when the operand names a pool entry
   rather than an immediate value.  */
#define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
  { int saw_index = 0;  int ival = (OPERAND_VALUE); \
    if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
    else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }

/* internal macro added for use by the WIDE case */
#define LOAD_INTERNAL(OPTYPE, OPVALUE) \
  expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);

/* Push local variable onto the opcode stack. */
#define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* have to do this since OPERAND_VALUE may have side-effects */ \
    int opvalue = OPERAND_VALUE; \
    LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
  }

#define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
  expand_java_return (OPERAND_TYPE##_type_node)

#define REM_EXPR TRUNC_MOD_EXPR
#define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
  expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)

#define FIELD(IS_STATIC, IS_PUT) \
  expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)

#define TEST(OPERAND_TYPE, CONDITION) \
  expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)

#define COND(OPERAND_TYPE, CONDITION) \
  expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)

#define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  BRANCH_##OPERAND_TYPE (OPERAND_VALUE)

#define BRANCH_GOTO(OPERAND_VALUE) \
  expand_java_goto (oldpc + OPERAND_VALUE)

#define BRANCH_CALL(OPERAND_VALUE) \
  expand_java_call (oldpc + OPERAND_VALUE, oldpc)

#if 0
#define BRANCH_RETURN(OPERAND_VALUE) \
  { \
    tree type = OPERAND_TYPE##_type_node; \
    tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
    expand_java_ret (value); \
  }
#endif

#define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
	  fprintf (stderr, "%3d: %s ", oldpc, opname); \
	  fprintf (stderr, "(not implemented)\n")
#define NOT_IMPL1(OPERAND_VALUE) \
	  fprintf (stderr, "%3d: %s ", oldpc, opname); \
	  fprintf (stderr, "(not implemented)\n")

#define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)

#define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)

#define STACK_POP(COUNT) java_stack_pop (COUNT)

#define STACK_SWAP(COUNT) java_stack_swap()

#define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
#define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
#define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)

/* tableswitch/lookupswitch operands are 4-byte aligned relative to
   the start of the code, hence the PC round-up.  */
#define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH

#define LOOKUP_SWITCH \
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
    tree selector = pop_value (INT_type_node); \
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
    while (--npairs >= 0) \
      { \
	jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
	expand_java_add_case (switch_expr, match, oldpc + offset); \
      } \
  }

#define TABLE_SWITCH \
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    tree selector = pop_value (INT_type_node); \
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
    for (; low <= high; low++) \
      { \
        jint offset = IMMEDIATE_s4; \
	expand_java_add_case (switch_expr, low, oldpc + offset); \
      } \
  }

/* invokeinterface carries an extra count byte and a zero byte after
   the method ref; other invokes pass nargs == -1.  */
#define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  { int opcode = byte_ops[PC-1]; \
    int method_ref_index = IMMEDIATE_u2; \
    int nargs; \
    if (IS_INTERFACE) { nargs = IMMEDIATE_u1;  (void) IMMEDIATE_u1; } \
    else nargs = -1; \
    expand_invoke (opcode, method_ref_index, nargs); \
  }

/* Handle new, checkcast, instanceof */
#define OBJECT(TYPE, OP) \
  expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))

#define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)

#define ARRAY_LOAD(OPERAND_TYPE) 			\
  {							\
    expand_java_arrayload( OPERAND_TYPE##_type_node );	\
  }

#define ARRAY_STORE(OPERAND_TYPE)			\
  {							\
    expand_java_arraystore( OPERAND_TYPE##_type_node );	\
  }

#define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
#define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
#define ARRAY_NEW_PTR()							\
    push_value (build_anewarray (get_class_constant (current_jcf,	\
						     IMMEDIATE_u2),	\
				 pop_value (int_type_node)));
#define ARRAY_NEW_NUM()				\
  {						\
    int atype = IMMEDIATE_u1;			\
    push_value (build_newarray (atype, pop_value (int_type_node)));\
  }
#define ARRAY_NEW_MULTI()					\
  {								\
    tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
    int  ndims = IMMEDIATE_u1;					\
    expand_java_multianewarray( klass, ndims );			\
  }

#define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
  push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
			   pop_value (OPERAND_TYPE##_type_node)));

#define CONVERT2(FROM_TYPE, TO_TYPE)					 \
  {									 \
    push_value (build1 (NOP_EXPR, int_type_node,			 \
			(convert (TO_TYPE##_type_node,			 \
				  pop_value (FROM_TYPE##_type_node))))); \
  }

#define CONVERT(FROM_TYPE, TO_TYPE)				\
  {								\
    push_value (convert (TO_TYPE##_type_node,			\
			 pop_value (FROM_TYPE##_type_node)));	\
  }

/* internal macro added for use by the WIDE case
   Added TREE_TYPE (decl) assignment, apbianco  */
#define STORE_INTERNAL(OPTYPE, OPVALUE)				\
  {								\
    tree decl, value;						\
    int index = OPVALUE;					\
    tree type = OPTYPE;						\
    value = pop_value (type);					\
    /* Use the actual type of the popped value, not the requested
       one, so the local's declared type tracks what was stored.  */ \
    type = TREE_TYPE (value);					\
    decl = find_local_variable (index, type, oldpc);		\
    set_local_type (index, type);				\
    java_add_stmt (build2 (MODIFY_EXPR, type, decl, value));	\
  }

#define STORE(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* have to do this since OPERAND_VALUE may have side-effects */ \
    int opvalue = OPERAND_VALUE; \
    STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
  }

#define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  SPECIAL_##INSTRUCTION(OPERAND_TYPE)

#define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
#define SPECIAL_EXIT(IGNORED)  MONITOR_OPERATION (soft_monitorexit_node)

#define MONITOR_OPERATION(call)			\
  {						\
    tree o = pop_value (ptr_type_node);		\
    tree c;					\
    flush_quick_stack ();			\
    c = build_java_monitor (call, o);		\
    TREE_SIDE_EFFECTS (c) = 1;			\
    java_add_stmt (c);				\
  }

#define SPECIAL_IINC(IGNORED) \
  { \
    unsigned int local_var_index = IMMEDIATE_u1; \
    int ival = IMMEDIATE_s1; \
    expand_iinc(local_var_index, ival, oldpc); \
  }

/* The wide prefix widens the index (and iinc constant) operands of
   the sub-instruction that follows it.  */
#define SPECIAL_WIDE(IGNORED) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    unsigned int local_var_index = IMMEDIATE_u2; \
    switch (modified_opcode) \
      { \
      case OPCODE_iinc: \
	{ \
	  int ival = IMMEDIATE_s2; \
	  expand_iinc (local_var_index, ival, oldpc); \
	  break; \
	} \
      case OPCODE_iload: \
      case OPCODE_lload: \
      case OPCODE_fload: \
      case OPCODE_dload: \
      case OPCODE_aload: \
	{ \
	  /* duplicate code from LOAD macro */ \
	  LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      case OPCODE_istore: \
      case OPCODE_lstore: \
      case OPCODE_fstore: \
      case OPCODE_dstore: \
      case OPCODE_astore: \
	{ \
	  STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      default: \
	error ("unrecognized wide sub-instruction"); \
      } \
  }

#define SPECIAL_THROW(IGNORED) \
  build_java_athrow (pop_value (throwable_type_node))

#define SPECIAL_BREAK NOT_IMPL1
#define IMPL          NOT_IMPL

#include "javaop.def"
#undef JAVAOP
    default:
      fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
    }
  return PC;
}
/* Return the opcode at PC in the code section pointed to by
   CODE_OFFSET.  The reader's current position is saved and restored,
   so this is a pure peek with no lasting effect on JCF.  */

static unsigned char
peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
{
  unsigned char opcode;
  long absolute_offset = (long)JCF_TELL (jcf);

  JCF_SEEK (jcf, code_offset);
  opcode = jcf->read_ptr [pc];
  JCF_SEEK (jcf, absolute_offset);
  return opcode;
}

/* Some bytecode compilers are emitting accurate LocalVariableTable
   attributes. Here's an example:

     PC        <t>store_<n>
     PC+1      ...

     Attribute "LocalVariableTable"
     slot #<n>: ... (PC: PC+1 length: L)

   This is accurate because the local in slot <n> really exists after
   the opcode at PC is executed, hence from PC+1 to PC+1+L.

   This procedure recognizes this situation and extends the live range
   of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
   length of the store instruction.)

   This function is used by `give_name_to_locals' so that a local's
   DECL features a DECL_LOCAL_START_PC such that the first related
   store operation will use DECL as a destination, not an unrelated
   temporary created for the occasion.

   This function uses a global (instruction_bits) `note_instructions' should
   have allocated and filled properly.  */

int
maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
		       int start_pc, int slot)
{
  int first, index, opcode;
  int pc, insn_pc;
  int wide_found = 0;

  if (!start_pc)
    return start_pc;

  first = index = -1;

  /* Find last previous instruction and remember it */
  for (pc = start_pc-1; pc; pc--)
    if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
      break;
  insn_pc = pc;

  /* Retrieve the instruction, handle `wide'.  */
  opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
  if (opcode == OPCODE_wide)
    {
      wide_found = 1;
      opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
    }

  /* For the single-byte <t>store_<n> forms the slot is encoded in the
     opcode itself (opcode - first); for the <t>store forms it's in the
     following operand byte(s).  */
  switch (opcode)
    {
    case OPCODE_astore_0:
    case OPCODE_astore_1:
    case OPCODE_astore_2:
    case OPCODE_astore_3:
      first = OPCODE_astore_0;
      break;

    case OPCODE_istore_0:
    case OPCODE_istore_1:
    case OPCODE_istore_2:
    case OPCODE_istore_3:
      first = OPCODE_istore_0;
      break;

    case OPCODE_lstore_0:
    case OPCODE_lstore_1:
    case OPCODE_lstore_2:
    case OPCODE_lstore_3:
      first = OPCODE_lstore_0;
      break;

    case OPCODE_fstore_0:
    case OPCODE_fstore_1:
    case OPCODE_fstore_2:
    case OPCODE_fstore_3:
      first = OPCODE_fstore_0;
      break;

    case OPCODE_dstore_0:
    case OPCODE_dstore_1:
    case OPCODE_dstore_2:
    case OPCODE_dstore_3:
      first = OPCODE_dstore_0;
      break;

    case OPCODE_astore:
    case OPCODE_istore:
    case OPCODE_lstore:
    case OPCODE_fstore:
    case OPCODE_dstore:
      index = peek_opcode_at_pc (jcf, code_offset, pc);
      if (wide_found)
	{
	  /* Under `wide', the index is a big-endian 16-bit operand.  */
	  int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
	  index = (other << 8) + index;
	}
      break;
    }

  /* Now we decide: first >0 means we have a <t>store_<n>, index >0
     means we have a <t>store. */
  if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
    start_pc = insn_pc;

  return start_pc;
}

/* Build a node to represent empty statements and blocks. */

tree
build_java_empty_stmt (void)
{
  tree t = build_empty_stmt (input_location);
  return t;
}
/* Promote all args of integral type before generating any code.
   For every parameter narrower than 32 bits, emit an assignment of the
   int-converted argument into the corresponding local-variable slot,
   so the body manipulates a full int.  */

static void
promote_arguments (void)
{
  int i;
  tree arg;
  for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
       arg != NULL_TREE; arg = DECL_CHAIN (arg), i++)
    {
      tree arg_type = TREE_TYPE (arg);
      if (INTEGRAL_TYPE_P (arg_type)
	  && TYPE_PRECISION (arg_type) < 32)
	{
	  tree copy = find_local_variable (i, integer_type_node, -1);
	  java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
				 copy,
				 fold_convert (integer_type_node, arg)));
	}
      /* Wide (two-slot) arguments occupy an extra local slot.  */
      if (TYPE_IS_WIDE (arg_type))
	i++;
    }
}

/* Create a local variable that points to the constant pool.
   Only done when optimizing: the pool address is loaded once into a
   TREE_CONSTANT pointer and TYPE_CPOOL_DATA_REF (output_class) is set
   to an INDIRECT_REF through it, so later pool accesses can share the
   load.  */

static void
cache_cpool_data_ref (void)
{
  if (optimize)
    {
      tree cpool;
      tree d = build_constant_data_ref (flag_indirect_classes);
      tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
				   build_pointer_type (TREE_TYPE (d)));
      java_add_local_var (cpool_ptr);
      TREE_CONSTANT (cpool_ptr) = 1;

      java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
			     cpool_ptr, build_address_of (d)));
      cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
      /* The pool pointer is always valid, so the dereference cannot
	 trap.  */
      TREE_THIS_NOTRAP (cpool) = 1;
      TYPE_CPOOL_DATA_REF (output_class) = cpool;
    }
}

#include "gt-java-expr.h"