1/* Write out a Java(TM) class file. 2 Copyright (C) 1998, 1999 Free Software Foundation, Inc. 3 4This file is part of GNU CC. 5 6GNU CC is free software; you can redistribute it and/or modify 7it under the terms of the GNU General Public License as published by 8the Free Software Foundation; either version 2, or (at your option) 9any later version. 10 11GNU CC is distributed in the hope that it will be useful, 12but WITHOUT ANY WARRANTY; without even the implied warranty of 13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14GNU General Public License for more details. 15You should have received a copy of the GNU General Public License 16along with GNU CC; see the file COPYING. If not, write to 17the Free Software Foundation, 59 Temple Place - Suite 330, 18Boston, MA 02111-1307, USA. 19 20Java and all Java-based marks are trademarks or registered trademarks 21of Sun Microsystems, Inc. in the United States and other countries. 22The Free Software Foundation is independent of Sun Microsystems, Inc. */ 23 24#include "config.h" 25#include "system.h" 26#include "jcf.h" 27#include "tree.h" 28#include "java-tree.h" 29#include "obstack.h" 30#undef AND 31#include "rtl.h" 32#include "flags.h" 33#include "java-opcodes.h" 34#include "parse.h" /* for BLOCK_EXPR_BODY */ 35#include "buffer.h" 36#include "toplev.h" 37 38#ifndef DIR_SEPARATOR 39#define DIR_SEPARATOR '/' 40#endif 41 42extern struct obstack temporary_obstack; 43 44/* Base directory in which `.class' files should be written. 45 NULL means to put the file into the same directory as the 46 corresponding .java file. */ 47char *jcf_write_base_directory = NULL; 48 49/* Make sure bytecode.data is big enough for at least N more bytes. */ 50 51#define RESERVE(N) \ 52 do { CHECK_OP(state); \ 53 if (state->bytecode.ptr + (N) > state->bytecode.limit) \ 54 buffer_grow (&state->bytecode, N); } while (0) 55 56/* Add a 1-byte instruction/operand I to bytecode.data, 57 assuming space has already been RESERVE'd. 
*/ 58 59#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state)) 60 61/* Like OP1, but I is a 2-byte big endian integer. */ 62 63#define OP2(I) \ 64 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0) 65 66/* Like OP1, but I is a 4-byte big endian integer. */ 67 68#define OP4(I) \ 69 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \ 70 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0) 71 72/* Macro to call each time we push I words on the JVM stack. */ 73 74#define NOTE_PUSH(I) \ 75 do { state->code_SP += (I); \ 76 if (state->code_SP > state->code_SP_max) \ 77 state->code_SP_max = state->code_SP; } while (0) 78 79/* Macro to call each time we pop I words from the JVM stack. */ 80 81#define NOTE_POP(I) \ 82 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0) 83 84/* A chunk or segment of a .class file. */ 85 86struct chunk 87{ 88 /* The next segment of this .class file. */ 89 struct chunk *next; 90 91 /* The actual data in this segment to be written to the .class file. */ 92 unsigned char *data; 93 94 /* The size of the segment to be written to the .class file. */ 95 int size; 96}; 97 98#define PENDING_CLEANUP_PC (-3) 99#define PENDING_EXIT_PC (-2) 100#define UNDEFINED_PC (-1) 101 102/* Each "block" represents a label plus the bytecode instructions following. 103 There may be branches out of the block, but no incoming jumps, except 104 to the beginning of the block. 105 106 If (pc < 0), the jcf_block is not an actual block (i.e. it has no 107 assocated code yet), but it is an undefined label. 108*/ 109 110struct jcf_block 111{ 112 /* For blocks that that are defined, the next block (in pc order). 113 For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR 114 or a cleanup expression (from a WITH_CLEANUP_EXPR), 115 this is the next (outer) such end label, in a stack headed by 116 labeled_blocks in jcf_partial. 
*/ 117 struct jcf_block *next; 118 119 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR. 120 pc is PENDING_EXIT_PC. 121 In the not-yet-defined end label for pending cleanup subroutine, 122 pc is PENDING_CLEANUP_PC. 123 For other not-yet-defined labels, pc is UNDEFINED_PC. 124 125 If the label has been defined: 126 Until perform_relocations is finished, this is the maximum possible 127 value of the bytecode offset at the begnning of this block. 128 After perform_relocations, it is the actual offset (pc). */ 129 int pc; 130 131 int linenumber; 132 133 /* After finish_jcf_block is called, The actual instructions contained in this block. 134 Before than NULL, and the instructions are in state->bytecode. */ 135 union { 136 struct chunk *chunk; 137 138 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region 139 coveed by the cleanup. */ 140 struct jcf_block *start_label; 141 } v; 142 143 union { 144 /* Set of relocations (in reverse offset order) for this block. */ 145 struct jcf_relocation *relocations; 146 147 /* If this block is that of the not-yet-defined end label of 148 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR. 149 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */ 150 tree labeled_block; 151 } u; 152}; 153 154/* A "relocation" type for the 0-3 bytes of padding at the start 155 of a tableswitch or a lookupswitch. */ 156#define SWITCH_ALIGN_RELOC 4 157 158/* A relocation type for the labels in a tableswitch or a lookupswitch; 159 these are relative to the start of the instruction, but (due to 160 th 0-3 bytes of padding), we don't know the offset before relocation. */ 161#define BLOCK_START_RELOC 1 162 163struct jcf_relocation 164{ 165 /* Next relocation for the current jcf_block. */ 166 struct jcf_relocation *next; 167 168 /* The (byte) offset within the current block that needs to be relocated. */ 169 HOST_WIDE_INT offset; 170 171 /* 0 if offset is a 4-byte relative offset. 
172 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted 173 for proper alignment in tableswitch/lookupswitch instructions. 174 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative 175 to the start of the containing block. 176 -1 if offset is a 2-byte relative offset. 177 < -1 if offset is the address of an instruction with a 2-byte offset 178 that does not have a corresponding 4-byte offset version, in which 179 case the absolute value of kind is the inverted opcode. 180 > 4 if offset is the address of an instruction (such as jsr) with a 181 2-byte offset that does have a corresponding 4-byte offset version, 182 in which case kind is the opcode of the 4-byte version (such as jsr_w). */ 183 int kind; 184 185 /* The label the relocation wants to actually transfer to. */ 186 struct jcf_block *label; 187}; 188 189/* State for single catch clause. */ 190 191struct jcf_handler 192{ 193 struct jcf_handler *next; 194 195 struct jcf_block *start_label; 196 struct jcf_block *end_label; 197 struct jcf_block *handler_label; 198 199 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */ 200 tree type; 201}; 202 203/* State for the current switch statement. */ 204 205struct jcf_switch_state 206{ 207 struct jcf_switch_state *prev; 208 struct jcf_block *default_label; 209 210 struct jcf_relocation *cases; 211 int num_cases; 212 HOST_WIDE_INT min_case, max_case; 213}; 214 215/* This structure is used to contain the various pieces that will 216 become a .class file. */ 217 218struct jcf_partial 219{ 220 struct chunk *first; 221 struct chunk *chunk; 222 struct obstack *chunk_obstack; 223 tree current_method; 224 225 /* List of basic blocks for the current method. 
*/ 226 struct jcf_block *blocks; 227 struct jcf_block *last_block; 228 229 struct localvar_info *first_lvar; 230 struct localvar_info *last_lvar; 231 int lvar_count; 232 233 CPool cpool; 234 235 int linenumber_count; 236 237 /* Until perform_relocations, this is a upper bound on the number 238 of bytes (so far) in the instructions for the current method. */ 239 int code_length; 240 241 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */ 242 struct jcf_block *labeled_blocks; 243 244 /* The current stack size (stack pointer) in the current method. */ 245 int code_SP; 246 247 /* The largest extent of stack size (stack pointer) in the current method. */ 248 int code_SP_max; 249 250 /* Contains a mapping from local var slot number to localvar_info. */ 251 struct buffer localvars; 252 253 /* The buffer allocated for bytecode for the current jcf_block. */ 254 struct buffer bytecode; 255 256 /* Chain of exception handlers for the current method. */ 257 struct jcf_handler *handlers; 258 259 /* Last element in handlers chain. */ 260 struct jcf_handler *last_handler; 261 262 /* Number of exception handlers for the current method. */ 263 int num_handlers; 264 265 /* Number of finalizers we are currently nested within. */ 266 int num_finalizers; 267 268 /* If non-NULL, use this for the return value. */ 269 tree return_value_decl; 270 271 /* Information about the current switch statemenet. 
*/ 272 struct jcf_switch_state *sw_state; 273}; 274 275static void generate_bytecode_insns PROTO ((tree, int, struct jcf_partial *)); 276static struct chunk * alloc_chunk PROTO ((struct chunk *, unsigned char *, 277 int, struct obstack *)); 278static unsigned char * append_chunk PROTO ((unsigned char *, int, 279 struct jcf_partial *)); 280static void append_chunk_copy PROTO ((unsigned char *, int, 281 struct jcf_partial *)); 282static struct jcf_block * gen_jcf_label PROTO ((struct jcf_partial *)); 283static void finish_jcf_block PROTO ((struct jcf_partial *)); 284static void define_jcf_label PROTO ((struct jcf_block *, 285 struct jcf_partial *)); 286static struct jcf_block * get_jcf_label_here PROTO ((struct jcf_partial *)); 287static void put_linenumber PROTO ((int, struct jcf_partial *)); 288static void localvar_alloc PROTO ((tree, struct jcf_partial *)); 289static int localvar_free PROTO ((tree, struct jcf_partial *)); 290static int get_access_flags PROTO ((tree)); 291static void write_chunks PROTO ((FILE *, struct chunk *)); 292static int adjust_typed_op PROTO ((tree, int)); 293static void generate_bytecode_conditional PROTO ((tree, struct jcf_block *, 294 struct jcf_block *, int, 295 struct jcf_partial *)); 296static void generate_bytecode_return PROTO ((tree, struct jcf_partial *)); 297static void perform_relocations PROTO ((struct jcf_partial *)); 298static void init_jcf_state PROTO ((struct jcf_partial *, struct obstack *)); 299static void init_jcf_method PROTO ((struct jcf_partial *, tree)); 300static void release_jcf_state PROTO ((struct jcf_partial *)); 301static struct chunk * generate_classfile PROTO ((tree, struct jcf_partial *)); 302 303 304/* Utility macros for appending (big-endian) data to a buffer. 305 We assume a local variable 'ptr' points into where we want to 306 write next, and we assume enoygh space has been allocated. 
*/ 307 308#ifdef ENABLE_CHECKING 309int 310CHECK_PUT(ptr, state, i) 311 void *ptr; 312 struct jcf_partial *state; 313 int i; 314{ 315 if (ptr < state->chunk->data 316 || (char*)ptr + i > state->chunk->data + state->chunk->size) 317 fatal ("internal error - CHECK_PUT failed"); 318 return 0; 319} 320#else 321#define CHECK_PUT(PTR, STATE, I) ((void)0) 322#endif 323 324#define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X)) 325#define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF)) 326#define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF)) 327#define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N)) 328 329 330/* Allocate a new chunk on obstack WORK, and link it in after LAST. 331 Set the data and size fields to DATA and SIZE, respectively. 332 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */ 333 334static struct chunk * 335alloc_chunk (last, data, size, work) 336 struct chunk *last; 337 unsigned char *data; 338 int size; 339 struct obstack *work; 340{ 341 struct chunk *chunk = (struct chunk *) 342 obstack_alloc (work, sizeof(struct chunk)); 343 344 if (data == NULL && size > 0) 345 data = obstack_alloc (work, size); 346 347 chunk->next = NULL; 348 chunk->data = data; 349 chunk->size = size; 350 if (last != NULL) 351 last->next = chunk; 352 return chunk; 353} 354 355#ifdef ENABLE_CHECKING 356int 357CHECK_OP(struct jcf_partial *state) 358{ 359 if (state->bytecode.ptr > state->bytecode.limit) 360 { 361 fatal("internal error - CHECK_OP failed"); 362 } 363 return 0; 364} 365#else 366#define CHECK_OP(STATE) ((void)0) 367#endif 368 369static unsigned char * 370append_chunk (data, size, state) 371 unsigned char *data; 372 int size; 373 struct jcf_partial *state; 374{ 375 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack); 376 if (state->first == NULL) 377 state->first = state->chunk; 378 return state->chunk->data; 379} 380 381static void 382append_chunk_copy (data, size, state) 383 unsigned char *data; 384 int size; 385 
struct jcf_partial *state; 386{ 387 unsigned char *ptr = append_chunk (NULL, size, state); 388 memcpy (ptr, data, size); 389} 390 391static struct jcf_block * 392gen_jcf_label (state) 393 struct jcf_partial *state; 394{ 395 struct jcf_block *block = (struct jcf_block *) 396 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block)); 397 block->next = NULL; 398 block->linenumber = -1; 399 block->pc = UNDEFINED_PC; 400 return block; 401} 402 403static void 404finish_jcf_block (state) 405 struct jcf_partial *state; 406{ 407 struct jcf_block *block = state->last_block; 408 struct jcf_relocation *reloc; 409 int code_length = BUFFER_LENGTH (&state->bytecode); 410 int pc = state->code_length; 411 append_chunk_copy (state->bytecode.data, code_length, state); 412 BUFFER_RESET (&state->bytecode); 413 block->v.chunk = state->chunk; 414 415 /* Calculate code_length to the maximum value it can have. */ 416 pc += block->v.chunk->size; 417 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next) 418 { 419 int kind = reloc->kind; 420 if (kind == SWITCH_ALIGN_RELOC) 421 pc += 3; 422 else if (kind > BLOCK_START_RELOC) 423 pc += 2; /* 2-byte offset may grow to 4-byte offset */ 424 else if (kind < -1) 425 pc += 5; /* May need to add a goto_w. 
*/ 426 } 427 state->code_length = pc; 428} 429 430static void 431define_jcf_label (label, state) 432 struct jcf_block *label; 433 struct jcf_partial *state; 434{ 435 if (state->last_block != NULL) 436 finish_jcf_block (state); 437 label->pc = state->code_length; 438 if (state->blocks == NULL) 439 state->blocks = label; 440 else 441 state->last_block->next = label; 442 state->last_block = label; 443 label->next = NULL; 444 label->u.relocations = NULL; 445} 446 447static struct jcf_block * 448get_jcf_label_here (state) 449 struct jcf_partial *state; 450{ 451 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0) 452 return state->last_block; 453 else 454 { 455 struct jcf_block *label = gen_jcf_label (state); 456 define_jcf_label (label, state); 457 return label; 458 } 459} 460 461/* Note a line number entry for the current PC and given LINE. */ 462 463static void 464put_linenumber (line, state) 465 int line; 466 struct jcf_partial *state; 467{ 468 struct jcf_block *label = get_jcf_label_here (state); 469 if (label->linenumber > 0) 470 { 471 label = gen_jcf_label (state); 472 define_jcf_label (label, state); 473 } 474 label->linenumber = line; 475 state->linenumber_count++; 476} 477 478/* Allocate a new jcf_handler, for a catch clause that catches exceptions 479 in the range (START_LABEL, END_LABEL). 
*/ 480 481static struct jcf_handler * 482alloc_handler (start_label, end_label, state) 483 struct jcf_block *start_label; 484 struct jcf_block *end_label; 485 struct jcf_partial *state; 486{ 487 struct jcf_handler *handler = (struct jcf_handler *) 488 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler)); 489 handler->start_label = start_label; 490 handler->end_label = end_label; 491 handler->handler_label = get_jcf_label_here (state); 492 if (state->handlers == NULL) 493 state->handlers = handler; 494 else 495 state->last_handler->next = handler; 496 state->last_handler = handler; 497 handler->next = NULL; 498 state->num_handlers++; 499 return handler; 500} 501 502 503/* The index of jvm local variable allocated for this DECL. 504 This is assigned when generating .class files; 505 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file. 506 (We don't allocate DECL_LANG_SPECIFIC for locals from Java sourc code.) */ 507 508#define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL) 509 510struct localvar_info 511{ 512 struct localvar_info *next; 513 514 tree decl; 515 struct jcf_block *start_label; 516 struct jcf_block *end_label; 517}; 518 519#define localvar_buffer ((struct localvar_info**) state->localvars.data) 520#define localvar_max \ 521 ((struct localvar_info**) state->localvars.ptr - localvar_buffer) 522 523static void 524localvar_alloc (decl, state) 525 tree decl; 526 struct jcf_partial *state; 527{ 528 struct jcf_block *start_label = get_jcf_label_here (state); 529 int wide = TYPE_IS_WIDE (TREE_TYPE (decl)); 530 int index; 531 register struct localvar_info *info; 532 register struct localvar_info **ptr = localvar_buffer; 533 register struct localvar_info **limit 534 = (struct localvar_info**) state->localvars.ptr; 535 for (index = 0; ptr < limit; index++, ptr++) 536 { 537 if (ptr[0] == NULL 538 && (! 
wide || ((ptr+1) < limit && ptr[1] == NULL))) 539 break; 540 } 541 if (ptr == limit) 542 { 543 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*)); 544 ptr = (struct localvar_info**) state->localvars.data + index; 545 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide); 546 } 547 info = (struct localvar_info *) 548 obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info)); 549 ptr[0] = info; 550 if (wide) 551 ptr[1] = (struct localvar_info *)(~0); 552 DECL_LOCAL_INDEX (decl) = index; 553 info->decl = decl; 554 info->start_label = start_label; 555 556 if (debug_info_level > DINFO_LEVEL_TERSE 557 && DECL_NAME (decl) != NULL_TREE) 558 { 559 /* Generate debugging info. */ 560 info->next = NULL; 561 if (state->last_lvar != NULL) 562 state->last_lvar->next = info; 563 else 564 state->first_lvar = info; 565 state->last_lvar = info; 566 state->lvar_count++; 567 } 568} 569 570static int 571localvar_free (decl, state) 572 tree decl; 573 struct jcf_partial *state; 574{ 575 struct jcf_block *end_label = get_jcf_label_here (state); 576 int index = DECL_LOCAL_INDEX (decl); 577 register struct localvar_info **ptr = &localvar_buffer [index]; 578 register struct localvar_info *info = *ptr; 579 int wide = TYPE_IS_WIDE (TREE_TYPE (decl)); 580 581 info->end_label = end_label; 582 583 if (info->decl != decl) 584 abort (); 585 ptr[0] = NULL; 586 if (wide) 587 { 588 if (ptr[1] != (struct localvar_info *)(~0)) 589 abort (); 590 ptr[1] = NULL; 591 } 592} 593 594 595#define STACK_TARGET 1 596#define IGNORE_TARGET 2 597 598/* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or 599 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. 
*/ 600 601static int 602get_access_flags (decl) 603 tree decl; 604{ 605 int flags = 0; 606 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL; 607 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */ 608 flags |= ACC_PUBLIC; 609 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */ 610 flags |= ACC_FINAL; 611 if (isfield || TREE_CODE (decl) == FUNCTION_DECL) 612 { 613 if (TREE_PROTECTED (decl)) 614 flags |= ACC_PROTECTED; 615 if (TREE_PRIVATE (decl)) 616 flags |= ACC_PRIVATE; 617 } 618 else if (TREE_CODE (decl) == TYPE_DECL) 619 { 620 if (CLASS_SUPER (decl)) 621 flags |= ACC_SUPER; 622 if (CLASS_ABSTRACT (decl)) 623 flags |= ACC_ABSTRACT; 624 if (CLASS_INTERFACE (decl)) 625 flags |= ACC_INTERFACE; 626 } 627 else 628 fatal ("internal error - bad argument to get_access_flags"); 629 if (TREE_CODE (decl) == FUNCTION_DECL) 630 { 631 if (METHOD_NATIVE (decl)) 632 flags |= ACC_NATIVE; 633 if (METHOD_STATIC (decl)) 634 flags |= ACC_STATIC; 635 if (METHOD_SYNCHRONIZED (decl)) 636 flags |= ACC_SYNCHRONIZED; 637 if (METHOD_ABSTRACT (decl)) 638 flags |= ACC_ABSTRACT; 639 } 640 if (isfield) 641 { 642 if (FIELD_STATIC (decl)) 643 flags |= ACC_STATIC; 644 if (FIELD_VOLATILE (decl)) 645 flags |= ACC_VOLATILE; 646 if (FIELD_TRANSIENT (decl)) 647 flags |= ACC_TRANSIENT; 648 } 649 return flags; 650} 651 652/* Write the list of segments starting at CHUNKS to STREAM. */ 653 654static void 655write_chunks (stream, chunks) 656 FILE* stream; 657 struct chunk *chunks; 658{ 659 for (; chunks != NULL; chunks = chunks->next) 660 fwrite (chunks->data, chunks->size, 1, stream); 661} 662 663/* Push a 1-word constant in the constant pool at the given INDEX. 664 (Caller is responsible for doing NOTE_PUSH.) 
*/ 665 666static void 667push_constant1 (index, state) 668 int index; 669 struct jcf_partial *state; 670{ 671 RESERVE (3); 672 if (index < 256) 673 { 674 OP1 (OPCODE_ldc); 675 OP1 (index); 676 } 677 else 678 { 679 OP1 (OPCODE_ldc_w); 680 OP2 (index); 681 } 682} 683 684/* Push a 2-word constant in the constant pool at the given INDEX. 685 (Caller is responsible for doing NOTE_PUSH.) */ 686 687static void 688push_constant2 (index, state) 689 int index; 690 struct jcf_partial *state; 691{ 692 RESERVE (3); 693 OP1 (OPCODE_ldc2_w); 694 OP2 (index); 695} 696 697/* Push 32-bit integer constant on VM stack. 698 Caller is responsible for doing NOTE_PUSH. */ 699 700static void 701push_int_const (i, state) 702 HOST_WIDE_INT i; 703 struct jcf_partial *state; 704{ 705 RESERVE(3); 706 if (i >= -1 && i <= 5) 707 OP1(OPCODE_iconst_0 + i); 708 else if (i >= -128 && i < 128) 709 { 710 OP1(OPCODE_bipush); 711 OP1(i); 712 } 713 else if (i >= -32768 && i < 32768) 714 { 715 OP1(OPCODE_sipush); 716 OP2(i); 717 } 718 else 719 { 720 i = find_constant1 (&state->cpool, CONSTANT_Integer, i & 0xFFFFFFFF); 721 push_constant1 (i, state); 722 } 723} 724 725static int 726find_constant_wide (lo, hi, state) 727 HOST_WIDE_INT lo, hi; 728 struct jcf_partial *state; 729{ 730 HOST_WIDE_INT w1, w2; 731 lshift_double (lo, hi, -32, 64, &w1, &w2, 1); 732 return find_constant2 (&state->cpool, CONSTANT_Long, 733 w1 & 0xFFFFFFFF, lo & 0xFFFFFFFF); 734} 735 736/* Find or allocate a constant pool entry for the given VALUE. 737 Return the index in the constant pool. 
*/ 738 739static int 740find_constant_index (value, state) 741 tree value; 742 struct jcf_partial *state; 743{ 744 if (TREE_CODE (value) == INTEGER_CST) 745 { 746 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32) 747 return find_constant1 (&state->cpool, CONSTANT_Integer, 748 TREE_INT_CST_LOW (value) & 0xFFFFFFFF); 749 else 750 return find_constant_wide (TREE_INT_CST_LOW (value), 751 TREE_INT_CST_HIGH (value), state); 752 } 753 else if (TREE_CODE (value) == REAL_CST) 754 { 755 long words[2]; 756 if (TYPE_PRECISION (TREE_TYPE (value)) == 32) 757 { 758 words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF; 759 return find_constant1 (&state->cpool, CONSTANT_Float, words[0]); 760 } 761 else 762 { 763 etardouble (TREE_REAL_CST (value), words); 764 return find_constant2 (&state->cpool, CONSTANT_Double, 765 words[1-FLOAT_WORDS_BIG_ENDIAN] & 0xFFFFFFFF, 766 words[FLOAT_WORDS_BIG_ENDIAN] & 0xFFFFFFFF); 767 } 768 } 769 else if (TREE_CODE (value) == STRING_CST) 770 { 771 return find_string_constant (&state->cpool, value); 772 } 773 else 774 fatal ("find_constant_index - bad type"); 775} 776 777/* Push 64-bit long constant on VM stack. 778 Caller is responsible for doing NOTE_PUSH. 
*/ 779 780static void 781push_long_const (lo, hi, state) 782 HOST_WIDE_INT lo, hi; 783 struct jcf_partial *state; 784{ 785 if (hi == 0 && lo >= 0 && lo <= 1) 786 { 787 RESERVE(1); 788 OP1(OPCODE_lconst_0 + lo); 789 } 790 else if ((hi == 0 && lo < 32768) || (hi == -1 && lo >= -32768)) 791 { 792 push_int_const (lo, state); 793 RESERVE (1); 794 OP1 (OPCODE_i2l); 795 } 796 else 797 push_constant2 (find_constant_wide (lo, hi, state), state); 798} 799 800static void 801field_op (field, opcode, state) 802 tree field; 803 int opcode; 804 struct jcf_partial *state; 805{ 806 int index = find_fieldref_index (&state->cpool, field); 807 RESERVE (3); 808 OP1 (opcode); 809 OP2 (index); 810} 811 812/* Returns an integer in the range 0 (for 'int') through 4 (for object 813 reference) to 7 (for 'short') which matches the pattern of how JVM 814 opcodes typically depend on the operand type. */ 815 816static int 817adjust_typed_op (type, max) 818 tree type; 819 int max; 820{ 821 switch (TREE_CODE (type)) 822 { 823 case POINTER_TYPE: 824 case RECORD_TYPE: return 4; 825 case BOOLEAN_TYPE: 826 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5; 827 case CHAR_TYPE: 828 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6; 829 case INTEGER_TYPE: 830 switch (TYPE_PRECISION (type)) 831 { 832 case 8: return max < 5 ? 0 : 5; 833 case 16: return max < 7 ? 
0 : 7; 834 case 32: return 0; 835 case 64: return 1; 836 } 837 break; 838 case REAL_TYPE: 839 switch (TYPE_PRECISION (type)) 840 { 841 case 32: return 2; 842 case 64: return 3; 843 } 844 break; 845 default: 846 break; 847 } 848 abort (); 849} 850 851static void 852maybe_wide (opcode, index, state) 853 int opcode, index; 854 struct jcf_partial *state; 855{ 856 if (index >= 256) 857 { 858 RESERVE (4); 859 OP1 (OPCODE_wide); 860 OP1 (opcode); 861 OP2 (index); 862 } 863 else 864 { 865 RESERVE (2); 866 OP1 (opcode); 867 OP1 (index); 868 } 869} 870 871/* Compile code to duplicate with offset, where 872 SIZE is the size of the stack item to duplicate (1 or 2), abd 873 OFFSET is where to insert the result (must be 0, 1, or 2). 874 (The new words get inserted at stack[SP-size-offset].) */ 875 876static void 877emit_dup (size, offset, state) 878 int size, offset; 879 struct jcf_partial *state; 880{ 881 int kind; 882 if (size == 0) 883 return; 884 RESERVE(1); 885 if (offset == 0) 886 kind = size == 1 ? OPCODE_dup : OPCODE_dup2; 887 else if (offset == 1) 888 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1; 889 else if (offset == 2) 890 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2; 891 else 892 abort(); 893 OP1 (kind); 894 NOTE_PUSH (size); 895} 896 897static void 898emit_pop (size, state) 899 int size; 900 struct jcf_partial *state; 901{ 902 RESERVE (1); 903 OP1 (OPCODE_pop - 1 + size); 904} 905 906static void 907emit_iinc (var, value, state) 908 tree var; 909 int value; 910 struct jcf_partial *state; 911{ 912 int slot = DECL_LOCAL_INDEX (var); 913 914 if (value < -128 || value > 127 || slot >= 256) 915 { 916 RESERVE (6); 917 OP1 (OPCODE_wide); 918 OP1 (OPCODE_iinc); 919 OP2 (slot); 920 OP2 (value); 921 } 922 else 923 { 924 RESERVE (3); 925 OP1 (OPCODE_iinc); 926 OP1 (slot); 927 OP1 (value); 928 } 929} 930 931static void 932emit_load_or_store (var, opcode, state) 933 tree var; /* Variable to load from or store into. 
*/ 934 int opcode; /* Either OPCODE_iload or OPCODE_istore. */ 935 struct jcf_partial *state; 936{ 937 tree type = TREE_TYPE (var); 938 int kind = adjust_typed_op (type, 4); 939 int index = DECL_LOCAL_INDEX (var); 940 if (index <= 3) 941 { 942 RESERVE (1); 943 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */ 944 } 945 else 946 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */ 947} 948 949static void 950emit_load (var, state) 951 tree var; 952 struct jcf_partial *state; 953{ 954 emit_load_or_store (var, OPCODE_iload, state); 955 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1); 956} 957 958static void 959emit_store (var, state) 960 tree var; 961 struct jcf_partial *state; 962{ 963 emit_load_or_store (var, OPCODE_istore, state); 964 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1); 965} 966 967static void 968emit_unop (opcode, type, state) 969 enum java_opcode opcode; 970 tree type ATTRIBUTE_UNUSED; 971 struct jcf_partial *state; 972{ 973 RESERVE(1); 974 OP1 (opcode); 975} 976 977static void 978emit_binop (opcode, type, state) 979 enum java_opcode opcode; 980 tree type; 981 struct jcf_partial *state; 982{ 983 int size = TYPE_IS_WIDE (type) ? 
2 : 1; 984 RESERVE(1); 985 OP1 (opcode); 986 NOTE_POP (size); 987} 988 989static void 990emit_reloc (value, kind, target, state) 991 HOST_WIDE_INT value; 992 int kind; 993 struct jcf_block *target; 994 struct jcf_partial *state; 995{ 996 struct jcf_relocation *reloc = (struct jcf_relocation *) 997 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation)); 998 struct jcf_block *block = state->last_block; 999 reloc->next = block->u.relocations; 1000 block->u.relocations = reloc; 1001 reloc->offset = BUFFER_LENGTH (&state->bytecode); 1002 reloc->label = target; 1003 reloc->kind = kind; 1004 if (kind == 0 || kind == BLOCK_START_RELOC) 1005 OP4 (value); 1006 else if (kind != SWITCH_ALIGN_RELOC) 1007 OP2 (value); 1008} 1009 1010static void 1011emit_switch_reloc (label, state) 1012 struct jcf_block *label; 1013 struct jcf_partial *state; 1014{ 1015 emit_reloc (0, BLOCK_START_RELOC, label, state); 1016} 1017 1018/* Similar to emit_switch_reloc, 1019 but re-uses an existing case reloc. */ 1020 1021static void 1022emit_case_reloc (reloc, state) 1023 struct jcf_relocation *reloc; 1024 struct jcf_partial *state; 1025{ 1026 struct jcf_block *block = state->last_block; 1027 reloc->next = block->u.relocations; 1028 block->u.relocations = reloc; 1029 reloc->offset = BUFFER_LENGTH (&state->bytecode); 1030 reloc->kind = BLOCK_START_RELOC; 1031 OP4 (0); 1032} 1033 1034/* Emit a conditional jump to TARGET with a 2-byte relative jump offset 1035 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */ 1036 1037static void 1038emit_if (target, opcode, inv_opcode, state) 1039 struct jcf_block *target; 1040 int opcode, inv_opcode; 1041 struct jcf_partial *state; 1042{ 1043 OP1 (opcode); 1044 // value is 1 byte from reloc back to start of instruction. 
1045 emit_reloc (1, - inv_opcode, target, state); 1046} 1047 1048static void 1049emit_goto (target, state) 1050 struct jcf_block *target; 1051 struct jcf_partial *state; 1052{ 1053 OP1 (OPCODE_goto); 1054 // Value is 1 byte from reloc back to start of instruction. 1055 emit_reloc (1, OPCODE_goto_w, target, state); 1056} 1057 1058static void 1059emit_jsr (target, state) 1060 struct jcf_block *target; 1061 struct jcf_partial *state; 1062{ 1063 OP1 (OPCODE_jsr); 1064 // Value is 1 byte from reloc back to start of instruction. 1065 emit_reloc (1, OPCODE_jsr_w, target, state); 1066} 1067 1068/* Generate code to evaluate EXP. If the result is true, 1069 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL. 1070 TRUE_BRANCH_FIRST is a code geneation hint that the 1071 TRUE_LABEL may follow right after this. (The idea is that we 1072 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */ 1073 1074static void 1075generate_bytecode_conditional (exp, true_label, false_label, 1076 true_branch_first, state) 1077 tree exp; 1078 struct jcf_block *true_label; 1079 struct jcf_block *false_label; 1080 int true_branch_first; 1081 struct jcf_partial *state; 1082{ 1083 tree exp0, exp1, type; 1084 int save_SP = state->code_SP; 1085 enum java_opcode op, negop; 1086 switch (TREE_CODE (exp)) 1087 { 1088 case INTEGER_CST: 1089 emit_goto (integer_zerop (exp) ? 
false_label : true_label, state); 1090 break; 1091 case COND_EXPR: 1092 { 1093 struct jcf_block *then_label = gen_jcf_label (state); 1094 struct jcf_block *else_label = gen_jcf_label (state); 1095 int save_SP_before, save_SP_after; 1096 generate_bytecode_conditional (TREE_OPERAND (exp, 0), 1097 then_label, else_label, 1, state); 1098 define_jcf_label (then_label, state); 1099 save_SP_before = state->code_SP; 1100 generate_bytecode_conditional (TREE_OPERAND (exp, 1), 1101 true_label, false_label, 1, state); 1102 save_SP_after = state->code_SP; 1103 state->code_SP = save_SP_before; 1104 define_jcf_label (else_label, state); 1105 generate_bytecode_conditional (TREE_OPERAND (exp, 2), 1106 true_label, false_label, 1107 true_branch_first, state); 1108 if (state->code_SP != save_SP_after) 1109 fatal ("internal error non-matching SP"); 1110 } 1111 break; 1112 case TRUTH_NOT_EXPR: 1113 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label, true_label, 1114 ! true_branch_first, state); 1115 break; 1116 case TRUTH_ANDIF_EXPR: 1117 { 1118 struct jcf_block *next_label = gen_jcf_label (state); 1119 generate_bytecode_conditional (TREE_OPERAND (exp, 0), 1120 next_label, false_label, 1, state); 1121 define_jcf_label (next_label, state); 1122 generate_bytecode_conditional (TREE_OPERAND (exp, 1), 1123 true_label, false_label, 1, state); 1124 } 1125 break; 1126 case TRUTH_ORIF_EXPR: 1127 { 1128 struct jcf_block *next_label = gen_jcf_label (state); 1129 generate_bytecode_conditional (TREE_OPERAND (exp, 0), 1130 true_label, next_label, 1, state); 1131 define_jcf_label (next_label, state); 1132 generate_bytecode_conditional (TREE_OPERAND (exp, 1), 1133 true_label, false_label, 1, state); 1134 } 1135 break; 1136 compare_1: 1137 /* Assuming op is one of the 2-operand if_icmp<COND> instructions, 1138 set it to the corresponding 1-operand if<COND> instructions. 
*/ 1139 op = op - 6; 1140 /* FALLTHROUGH */ 1141 compare_2: 1142 /* The opcodes with their inverses are allocated in pairs. 1143 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */ 1144 negop = (op & 1) ? op + 1 : op - 1; 1145 compare_2_ptr: 1146 if (true_branch_first) 1147 { 1148 emit_if (false_label, negop, op, state); 1149 emit_goto (true_label, state); 1150 } 1151 else 1152 { 1153 emit_if (true_label, op, negop, state); 1154 emit_goto (false_label, state); 1155 } 1156 break; 1157 case EQ_EXPR: 1158 op = OPCODE_if_icmpeq; 1159 goto compare; 1160 case NE_EXPR: 1161 op = OPCODE_if_icmpne; 1162 goto compare; 1163 case GT_EXPR: 1164 op = OPCODE_if_icmpgt; 1165 goto compare; 1166 case LT_EXPR: 1167 op = OPCODE_if_icmplt; 1168 goto compare; 1169 case GE_EXPR: 1170 op = OPCODE_if_icmpge; 1171 goto compare; 1172 case LE_EXPR: 1173 op = OPCODE_if_icmple; 1174 goto compare; 1175 compare: 1176 exp0 = TREE_OPERAND (exp, 0); 1177 exp1 = TREE_OPERAND (exp, 1); 1178 type = TREE_TYPE (exp0); 1179 switch (TREE_CODE (type)) 1180 { 1181 int opf; 1182 case POINTER_TYPE: case RECORD_TYPE: 1183 switch (TREE_CODE (exp)) 1184 { 1185 case EQ_EXPR: op = OPCODE_if_acmpeq; break; 1186 case NE_EXPR: op = OPCODE_if_acmpne; break; 1187 default: abort(); 1188 } 1189 if (integer_zerop (exp1) || integer_zerop (exp0)) 1190 { 1191 generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp0, 1192 STACK_TARGET, state); 1193 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq); 1194 negop = (op & 1) ? 
op - 1 : op + 1; 1195 NOTE_POP (1); 1196 goto compare_2_ptr; 1197 } 1198 generate_bytecode_insns (exp0, STACK_TARGET, state); 1199 generate_bytecode_insns (exp1, STACK_TARGET, state); 1200 NOTE_POP (2); 1201 goto compare_2; 1202 case REAL_TYPE: 1203 generate_bytecode_insns (exp0, STACK_TARGET, state); 1204 generate_bytecode_insns (exp1, STACK_TARGET, state); 1205 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple) 1206 opf = OPCODE_fcmpg; 1207 else 1208 opf = OPCODE_fcmpl; 1209 if (TYPE_PRECISION (type) > 32) 1210 { 1211 opf += 2; 1212 NOTE_POP (4); 1213 } 1214 else 1215 NOTE_POP (2); 1216 RESERVE (1); 1217 OP1 (opf); 1218 goto compare_1; 1219 case INTEGER_TYPE: 1220 if (TYPE_PRECISION (type) > 32) 1221 { 1222 generate_bytecode_insns (exp0, STACK_TARGET, state); 1223 generate_bytecode_insns (exp1, STACK_TARGET, state); 1224 NOTE_POP (4); 1225 RESERVE (1); 1226 OP1 (OPCODE_lcmp); 1227 goto compare_1; 1228 } 1229 /* FALLTHOUGH */ 1230 default: 1231 if (integer_zerop (exp1)) 1232 { 1233 generate_bytecode_insns (exp0, STACK_TARGET, state); 1234 NOTE_POP (1); 1235 goto compare_1; 1236 } 1237 if (integer_zerop (exp0)) 1238 { 1239 switch (op) 1240 { 1241 case OPCODE_if_icmplt: 1242 case OPCODE_if_icmpge: 1243 op += 2; 1244 break; 1245 case OPCODE_if_icmpgt: 1246 case OPCODE_if_icmple: 1247 op -= 2; 1248 break; 1249 default: 1250 break; 1251 } 1252 generate_bytecode_insns (exp1, STACK_TARGET, state); 1253 NOTE_POP (1); 1254 goto compare_1; 1255 } 1256 generate_bytecode_insns (exp0, STACK_TARGET, state); 1257 generate_bytecode_insns (exp1, STACK_TARGET, state); 1258 NOTE_POP (2); 1259 goto compare_2; 1260 } 1261 1262 default: 1263 generate_bytecode_insns (exp, STACK_TARGET, state); 1264 NOTE_POP (1); 1265 if (true_branch_first) 1266 { 1267 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state); 1268 emit_goto (true_label, state); 1269 } 1270 else 1271 { 1272 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state); 1273 emit_goto (false_label, state); 1274 } 1275 break; 1276 
} 1277 if (save_SP != state->code_SP) 1278 fatal ("internal error - SP mismatch"); 1279} 1280 1281/* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs 1282 but only as far out as LIMIT (since we are about to jump to the 1283 emit label that is LIMIT). */ 1284 1285static void 1286call_cleanups (limit, state) 1287 struct jcf_block *limit; 1288 struct jcf_partial *state; 1289{ 1290 struct jcf_block *block = state->labeled_blocks; 1291 for (; block != limit; block = block->next) 1292 { 1293 if (block->pc == PENDING_CLEANUP_PC) 1294 emit_jsr (block, state); 1295 } 1296} 1297 1298static void 1299generate_bytecode_return (exp, state) 1300 tree exp; 1301 struct jcf_partial *state; 1302{ 1303 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method)); 1304 int returns_void = TREE_CODE (return_type) == VOID_TYPE; 1305 int op; 1306 again: 1307 if (exp != NULL) 1308 { 1309 switch (TREE_CODE (exp)) 1310 { 1311 case COMPOUND_EXPR: 1312 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, 1313 state); 1314 exp = TREE_OPERAND (exp, 1); 1315 goto again; 1316 case COND_EXPR: 1317 { 1318 struct jcf_block *then_label = gen_jcf_label (state); 1319 struct jcf_block *else_label = gen_jcf_label (state); 1320 generate_bytecode_conditional (TREE_OPERAND (exp, 0), 1321 then_label, else_label, 1, state); 1322 define_jcf_label (then_label, state); 1323 generate_bytecode_return (TREE_OPERAND (exp, 1), state); 1324 define_jcf_label (else_label, state); 1325 generate_bytecode_return (TREE_OPERAND (exp, 2), state); 1326 } 1327 return; 1328 default: 1329 generate_bytecode_insns (exp, 1330 returns_void ? 
IGNORE_TARGET 1331 : STACK_TARGET, state); 1332 } 1333 } 1334 if (returns_void) 1335 { 1336 op = OPCODE_return; 1337 call_cleanups (NULL_TREE, state); 1338 } 1339 else 1340 { 1341 op = OPCODE_ireturn + adjust_typed_op (return_type, 4); 1342 if (state->num_finalizers > 0) 1343 { 1344 if (state->return_value_decl == NULL_TREE) 1345 { 1346 state->return_value_decl 1347 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp)); 1348 localvar_alloc (state->return_value_decl, state); 1349 } 1350 emit_store (state->return_value_decl, state); 1351 call_cleanups (NULL_TREE, state); 1352 emit_load (state->return_value_decl, state); 1353 /* If we call localvar_free (state->return_value_decl, state), 1354 then we risk the save decl erroneously re-used in the 1355 finalizer. Instead, we keep the state->return_value_decl 1356 allocated through the rest of the method. This is not 1357 the greatest solution, but it is at least simple and safe. */ 1358 } 1359 } 1360 RESERVE (1); 1361 OP1 (op); 1362} 1363 1364/* Generate bytecode for sub-expression EXP of METHOD. 1365 TARGET is one of STACK_TARGET or IGNORE_TARGET. */ 1366 1367static void 1368generate_bytecode_insns (exp, target, state) 1369 tree exp; 1370 int target; 1371 struct jcf_partial *state; 1372{ 1373 tree type; 1374 enum java_opcode jopcode; 1375 int op; 1376 HOST_WIDE_INT value; 1377 int post_op; 1378 int size; 1379 int offset; 1380 1381 if (exp == NULL && target == IGNORE_TARGET) 1382 return; 1383 1384 type = TREE_TYPE (exp); 1385 1386 switch (TREE_CODE (exp)) 1387 { 1388 case BLOCK: 1389 if (BLOCK_EXPR_BODY (exp)) 1390 { 1391 tree local; 1392 tree body = BLOCK_EXPR_BODY (exp); 1393 for (local = BLOCK_EXPR_DECLS (exp); local; ) 1394 { 1395 tree next = TREE_CHAIN (local); 1396 localvar_alloc (local, state); 1397 local = next; 1398 } 1399 /* Avoid deep recursion for long blocks. 
*/ 1400 while (TREE_CODE (body) == COMPOUND_EXPR) 1401 { 1402 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state); 1403 body = TREE_OPERAND (body, 1); 1404 } 1405 generate_bytecode_insns (body, target, state); 1406 for (local = BLOCK_EXPR_DECLS (exp); local; ) 1407 { 1408 tree next = TREE_CHAIN (local); 1409 localvar_free (local, state); 1410 local = next; 1411 } 1412 } 1413 break; 1414 case COMPOUND_EXPR: 1415 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state); 1416 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state); 1417 break; 1418 case EXPR_WITH_FILE_LOCATION: 1419 { 1420 char *saved_input_filename = input_filename; 1421 tree body = EXPR_WFL_NODE (exp); 1422 int saved_lineno = lineno; 1423 if (body == empty_stmt_node) 1424 break; 1425 input_filename = EXPR_WFL_FILENAME (exp); 1426 lineno = EXPR_WFL_LINENO (exp); 1427 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0 1428 && debug_info_level > DINFO_LEVEL_NONE) 1429 put_linenumber (lineno, state); 1430 generate_bytecode_insns (body, target, state); 1431 input_filename = saved_input_filename; 1432 lineno = saved_lineno; 1433 } 1434 break; 1435 case INTEGER_CST: 1436 if (target == IGNORE_TARGET) ; /* do nothing */ 1437 else if (TREE_CODE (type) == POINTER_TYPE) 1438 { 1439 if (! integer_zerop (exp)) 1440 abort(); 1441 RESERVE(1); 1442 OP1 (OPCODE_aconst_null); 1443 NOTE_PUSH (1); 1444 } 1445 else if (TYPE_PRECISION (type) <= 32) 1446 { 1447 push_int_const (TREE_INT_CST_LOW (exp), state); 1448 NOTE_PUSH (1); 1449 } 1450 else 1451 { 1452 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp), 1453 state); 1454 NOTE_PUSH (2); 1455 } 1456 break; 1457 case REAL_CST: 1458 { 1459 int prec = TYPE_PRECISION (type) >> 5; 1460 RESERVE(1); 1461 if (real_zerop (exp)) 1462 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0); 1463 else if (real_onep (exp)) 1464 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1); 1465 /* FIXME Should also use fconst_2 for 2.0f. 
1466 Also, should use iconst_2/ldc followed by i2f/i2d 1467 for other float/double when the value is a small integer. */ 1468 else 1469 { 1470 offset = find_constant_index (exp, state); 1471 if (prec == 1) 1472 push_constant1 (offset, state); 1473 else 1474 push_constant2 (offset, state); 1475 } 1476 NOTE_PUSH (prec); 1477 } 1478 break; 1479 case STRING_CST: 1480 push_constant1 (find_string_constant (&state->cpool, exp), state); 1481 NOTE_PUSH (1); 1482 break; 1483 case VAR_DECL: 1484 if (TREE_STATIC (exp)) 1485 { 1486 field_op (exp, OPCODE_getstatic, state); 1487 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1); 1488 break; 1489 } 1490 /* ... fall through ... */ 1491 case PARM_DECL: 1492 emit_load (exp, state); 1493 break; 1494 case NON_LVALUE_EXPR: 1495 case INDIRECT_REF: 1496 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state); 1497 break; 1498 case ARRAY_REF: 1499 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state); 1500 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state); 1501 if (target != IGNORE_TARGET) 1502 { 1503 jopcode = OPCODE_iaload + adjust_typed_op (type, 7); 1504 RESERVE(1); 1505 OP1 (jopcode); 1506 if (! TYPE_IS_WIDE (type)) 1507 NOTE_POP (1); 1508 } 1509 break; 1510 case COMPONENT_REF: 1511 { 1512 tree obj = TREE_OPERAND (exp, 0); 1513 tree field = TREE_OPERAND (exp, 1); 1514 int is_static = FIELD_STATIC (field); 1515 generate_bytecode_insns (obj, 1516 is_static ? IGNORE_TARGET : target, state); 1517 if (target != IGNORE_TARGET) 1518 { 1519 if (DECL_NAME (field) == length_identifier_node && !is_static 1520 && TYPE_ARRAY_P (TREE_TYPE (obj))) 1521 { 1522 RESERVE (1); 1523 OP1 (OPCODE_arraylength); 1524 } 1525 else 1526 { 1527 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield, 1528 state); 1529 if (! is_static) 1530 NOTE_POP (1); 1531 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 
2 : 1); 1532 } 1533 } 1534 } 1535 break; 1536 case TRUTH_ANDIF_EXPR: 1537 case TRUTH_ORIF_EXPR: 1538 case EQ_EXPR: 1539 case NE_EXPR: 1540 case GT_EXPR: 1541 case LT_EXPR: 1542 case GE_EXPR: 1543 case LE_EXPR: 1544 { 1545 struct jcf_block *then_label = gen_jcf_label (state); 1546 struct jcf_block *else_label = gen_jcf_label (state); 1547 struct jcf_block *end_label = gen_jcf_label (state); 1548 generate_bytecode_conditional (exp, 1549 then_label, else_label, 1, state); 1550 define_jcf_label (then_label, state); 1551 push_int_const (1, state); 1552 emit_goto (end_label, state); 1553 define_jcf_label (else_label, state); 1554 push_int_const (0, state); 1555 define_jcf_label (end_label, state); 1556 NOTE_PUSH (1); 1557 } 1558 break; 1559 case COND_EXPR: 1560 { 1561 struct jcf_block *then_label = gen_jcf_label (state); 1562 struct jcf_block *else_label = gen_jcf_label (state); 1563 struct jcf_block *end_label = gen_jcf_label (state); 1564 generate_bytecode_conditional (TREE_OPERAND (exp, 0), 1565 then_label, else_label, 1, state); 1566 define_jcf_label (then_label, state); 1567 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state); 1568 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1)) 1569 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. 
*/ 1570 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE) 1571 emit_goto (end_label, state); 1572 define_jcf_label (else_label, state); 1573 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state); 1574 define_jcf_label (end_label, state); 1575 } 1576 break; 1577 case CASE_EXPR: 1578 { 1579 struct jcf_switch_state *sw_state = state->sw_state; 1580 struct jcf_relocation *reloc = (struct jcf_relocation *) 1581 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation)); 1582 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)); 1583 reloc->kind = 0; 1584 reloc->label = get_jcf_label_here (state); 1585 reloc->offset = case_value; 1586 reloc->next = sw_state->cases; 1587 sw_state->cases = reloc; 1588 if (sw_state->num_cases == 0) 1589 { 1590 sw_state->min_case = case_value; 1591 sw_state->max_case = case_value; 1592 } 1593 else 1594 { 1595 if (case_value < sw_state->min_case) 1596 sw_state->min_case = case_value; 1597 if (case_value > sw_state->max_case) 1598 sw_state->max_case = case_value; 1599 } 1600 sw_state->num_cases++; 1601 } 1602 break; 1603 case DEFAULT_EXPR: 1604 state->sw_state->default_label = get_jcf_label_here (state); 1605 break; 1606 1607 case SWITCH_EXPR: 1608 { 1609 /* The SWITCH_EXPR has three parts, generated in the following order: 1610 1. the switch_expression (the value used to select the correct case); 1611 2. the switch_body; 1612 3. the switch_instruction (the tableswitch/loopupswitch instruction.). 1613 After code generation, we will re-order then in the order 1, 3, 2. 1614 This is to avoid an extra GOTOs. */ 1615 struct jcf_switch_state sw_state; 1616 struct jcf_block *expression_last; /* Last block of the switch_expression. */ 1617 struct jcf_block *body_last; /* Last block of the switch_body. */ 1618 struct jcf_block *switch_instruction; /* First block of switch_instruction. */ 1619 struct jcf_block *instruction_last; /* Last block of the switch_instruction. 
*/ 1620 struct jcf_block *body_block; 1621 int switch_length; 1622 sw_state.prev = state->sw_state; 1623 state->sw_state = &sw_state; 1624 sw_state.cases = NULL; 1625 sw_state.num_cases = 0; 1626 sw_state.default_label = NULL; 1627 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state); 1628 expression_last = state->last_block; 1629 body_block = get_jcf_label_here (state); /* Force a new block here. */ 1630 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state); 1631 body_last = state->last_block; 1632 1633 switch_instruction = gen_jcf_label (state); 1634 define_jcf_label (switch_instruction, state); 1635 if (sw_state.default_label == NULL) 1636 sw_state.default_label = gen_jcf_label (state); 1637 1638 if (sw_state.num_cases <= 1) 1639 { 1640 if (sw_state.num_cases == 0) 1641 { 1642 emit_pop (1, state); 1643 NOTE_POP (1); 1644 } 1645 else 1646 { 1647 push_int_const (sw_state.cases->offset, state); 1648 emit_if (sw_state.cases->label, 1649 OPCODE_ifeq, OPCODE_ifne, state); 1650 } 1651 emit_goto (sw_state.default_label, state); 1652 } 1653 else 1654 { 1655 HOST_WIDE_INT i; 1656 /* Copy the chain of relocs into a sorted array. */ 1657 struct jcf_relocation **relocs = (struct jcf_relocation **) 1658 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *)); 1659 /* The relocs arrays is a buffer with a gap. 1660 The assumption is that cases will normally come in "runs". 
*/ 1661 int gap_start = 0; 1662 int gap_end = sw_state.num_cases; 1663 struct jcf_relocation *reloc; 1664 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next) 1665 { 1666 HOST_WIDE_INT case_value = reloc->offset; 1667 while (gap_end < sw_state.num_cases) 1668 { 1669 struct jcf_relocation *end = relocs[gap_end]; 1670 if (case_value <= end->offset) 1671 break; 1672 relocs[gap_start++] = end; 1673 gap_end++; 1674 } 1675 while (gap_start > 0) 1676 { 1677 struct jcf_relocation *before = relocs[gap_start-1]; 1678 if (case_value >= before->offset) 1679 break; 1680 relocs[--gap_end] = before; 1681 gap_start--; 1682 } 1683 relocs[gap_start++] = reloc; 1684 /* Note we don't check for duplicates. FIXME! */ 1685 } 1686 1687 if (2 * sw_state.num_cases 1688 >= sw_state.max_case - sw_state.min_case) 1689 { /* Use tableswitch. */ 1690 int index = 0; 1691 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1)); 1692 OP1 (OPCODE_tableswitch); 1693 emit_reloc (0, SWITCH_ALIGN_RELOC, NULL, state); 1694 emit_switch_reloc (sw_state.default_label, state); 1695 OP4 (sw_state.min_case); 1696 OP4 (sw_state.max_case); 1697 for (i = sw_state.min_case; ; ) 1698 { 1699 reloc = relocs[index]; 1700 if (i == reloc->offset) 1701 { 1702 emit_case_reloc (reloc, state); 1703 if (i == sw_state.max_case) 1704 break; 1705 index++; 1706 } 1707 else 1708 emit_switch_reloc (sw_state.default_label, state); 1709 i++; 1710 } 1711 } 1712 else 1713 { /* Use lookupswitch. 
*/ 1714 RESERVE(9 + 8 * sw_state.num_cases); 1715 OP1 (OPCODE_lookupswitch); 1716 emit_reloc (0, SWITCH_ALIGN_RELOC, NULL, state); 1717 emit_switch_reloc (sw_state.default_label, state); 1718 OP4 (sw_state.num_cases); 1719 for (i = 0; i < sw_state.num_cases; i++) 1720 { 1721 struct jcf_relocation *reloc = relocs[i]; 1722 OP4 (reloc->offset); 1723 emit_case_reloc (reloc, state); 1724 } 1725 } 1726 free (relocs); 1727 } 1728 1729 instruction_last = state->last_block; 1730 if (sw_state.default_label->pc < 0) 1731 define_jcf_label (sw_state.default_label, state); 1732 else /* Force a new block. */ 1733 sw_state.default_label = get_jcf_label_here (state); 1734 /* Now re-arrange the blocks so the switch_instruction 1735 comes before the switch_body. */ 1736 switch_length = state->code_length - switch_instruction->pc; 1737 switch_instruction->pc = body_block->pc; 1738 instruction_last->next = body_block; 1739 instruction_last->v.chunk->next = body_block->v.chunk; 1740 expression_last->next = switch_instruction; 1741 expression_last->v.chunk->next = switch_instruction->v.chunk; 1742 body_last->next = sw_state.default_label; 1743 body_last->v.chunk->next = NULL; 1744 state->chunk = body_last->v.chunk; 1745 for (; body_block != sw_state.default_label; body_block = body_block->next) 1746 body_block->pc += switch_length; 1747 1748 state->sw_state = sw_state.prev; 1749 break; 1750 } 1751 1752 case RETURN_EXPR: 1753 exp = TREE_OPERAND (exp, 0); 1754 if (exp == NULL_TREE) 1755 exp = empty_stmt_node; 1756 else if (TREE_CODE (exp) != MODIFY_EXPR) 1757 abort (); 1758 else 1759 exp = TREE_OPERAND (exp, 1); 1760 generate_bytecode_return (exp, state); 1761 break; 1762 case LABELED_BLOCK_EXPR: 1763 { 1764 struct jcf_block *end_label = gen_jcf_label (state); 1765 end_label->next = state->labeled_blocks; 1766 state->labeled_blocks = end_label; 1767 end_label->pc = PENDING_EXIT_PC; 1768 end_label->u.labeled_block = exp; 1769 if (LABELED_BLOCK_BODY (exp)) 1770 generate_bytecode_insns 
(LABELED_BLOCK_BODY (exp), target, state); 1771 if (state->labeled_blocks != end_label) 1772 abort(); 1773 state->labeled_blocks = end_label->next; 1774 define_jcf_label (end_label, state); 1775 } 1776 break; 1777 case LOOP_EXPR: 1778 { 1779 tree body = TREE_OPERAND (exp, 0); 1780#if 0 1781 if (TREE_CODE (body) == COMPOUND_EXPR 1782 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR) 1783 { 1784 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L: 1785 to: GOTO L; BODY; L: if (!TEST) GOTO L; */ 1786 struct jcf_block *head_label; 1787 struct jcf_block *body_label; 1788 struct jcf_block *end_label = gen_jcf_label (state); 1789 struct jcf_block *exit_label = state->labeled_blocks; 1790 head_label = gen_jcf_label (state); 1791 emit_goto (head_label, state); 1792 body_label = get_jcf_label_here (state); 1793 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state); 1794 define_jcf_label (head_label, state); 1795 generate_bytecode_conditional (TREE_OPERAND (body, 0), 1796 end_label, body_label, 1, state); 1797 define_jcf_label (end_label, state); 1798 } 1799 else 1800#endif 1801 { 1802 struct jcf_block *head_label = get_jcf_label_here (state); 1803 generate_bytecode_insns (body, IGNORE_TARGET, state); 1804 emit_goto (head_label, state); 1805 } 1806 } 1807 break; 1808 case EXIT_EXPR: 1809 { 1810 struct jcf_block *label = state->labeled_blocks; 1811 struct jcf_block *end_label = gen_jcf_label (state); 1812 generate_bytecode_conditional (TREE_OPERAND (exp, 0), 1813 label, end_label, 0, state); 1814 define_jcf_label (end_label, state); 1815 } 1816 break; 1817 case EXIT_BLOCK_EXPR: 1818 { 1819 struct jcf_block *label = state->labeled_blocks; 1820 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl; 1821 while (label->u.labeled_block != TREE_OPERAND (exp, 0)) 1822 label = label->next; 1823 call_cleanups (label, state); 1824 emit_goto (label, state); 1825 } 1826 break; 1827 1828 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment; 1829 case PREINCREMENT_EXPR: value 
= 1; post_op = 0; goto increment; 1830 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment; 1831 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment; 1832 increment: 1833 1834 exp = TREE_OPERAND (exp, 0); 1835 type = TREE_TYPE (exp); 1836 size = TYPE_IS_WIDE (type) ? 2 : 1; 1837 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL) 1838 && ! TREE_STATIC (exp) 1839 && TREE_CODE (type) == INTEGER_TYPE 1840 && TYPE_PRECISION (type) == 32) 1841 { 1842 if (target != IGNORE_TARGET && post_op) 1843 emit_load (exp, state); 1844 emit_iinc (exp, value, state); 1845 if (target != IGNORE_TARGET && ! post_op) 1846 emit_load (exp, state); 1847 break; 1848 } 1849 if (TREE_CODE (exp) == COMPONENT_REF) 1850 { 1851 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state); 1852 emit_dup (1, 0, state); 1853 /* Stack: ..., objectref, objectref. */ 1854 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state); 1855 NOTE_PUSH (size-1); 1856 /* Stack: ..., objectref, oldvalue. */ 1857 offset = 1; 1858 } 1859 else if (TREE_CODE (exp) == ARRAY_REF) 1860 { 1861 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state); 1862 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state); 1863 emit_dup (2, 0, state); 1864 /* Stack: ..., array, index, array, index. */ 1865 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7); 1866 RESERVE(1); 1867 OP1 (jopcode); 1868 NOTE_POP (2-size); 1869 /* Stack: ..., array, index, oldvalue. */ 1870 offset = 2; 1871 } 1872 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL) 1873 { 1874 generate_bytecode_insns (exp, STACK_TARGET, state); 1875 /* Stack: ..., oldvalue. */ 1876 offset = 0; 1877 } 1878 else 1879 abort (); 1880 1881 if (target != IGNORE_TARGET && post_op) 1882 emit_dup (size, offset, state); 1883 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */ 1884 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. 
*/ 1885 /* Stack, otherwise: ..., [result, ] oldvalue. */ 1886 if (size == 1) 1887 push_int_const (value, state); 1888 else 1889 push_long_const (value, value >= 0 ? 0 : -1, state); 1890 NOTE_PUSH (size); 1891 emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state); 1892 if (target != IGNORE_TARGET && ! post_op) 1893 emit_dup (size, offset, state); 1894 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */ 1895 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */ 1896 /* Stack, otherwise: ..., [result, ] newvalue. */ 1897 goto finish_assignment; 1898 1899 case MODIFY_EXPR: 1900 { 1901 tree lhs = TREE_OPERAND (exp, 0); 1902 tree rhs = TREE_OPERAND (exp, 1); 1903 int offset = 0; 1904 1905 /* See if we can use the iinc instruction. */ 1906 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL) 1907 && ! TREE_STATIC (lhs) 1908 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE 1909 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32 1910 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR)) 1911 { 1912 tree arg0 = TREE_OPERAND (rhs, 0); 1913 tree arg1 = TREE_OPERAND (rhs, 1); 1914 HOST_WIDE_INT min_value = -32768; 1915 HOST_WIDE_INT max_value = 32767; 1916 if (TREE_CODE (rhs) == MINUS_EXPR) 1917 { 1918 min_value++; 1919 max_value++; 1920 } 1921 else if (arg1 == lhs) 1922 { 1923 arg0 = arg1; 1924 arg1 = TREE_OPERAND (rhs, 0); 1925 } 1926 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST) 1927 { 1928 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1); 1929 value = TREE_INT_CST_LOW (arg1); 1930 if ((hi_value == 0 && value <= max_value) 1931 || (hi_value == -1 && value >= min_value)) 1932 { 1933 if (TREE_CODE (rhs) == MINUS_EXPR) 1934 value = -value; 1935 emit_iinc (lhs, value, state); 1936 break; 1937 } 1938 } 1939 } 1940 1941 if (TREE_CODE (lhs) == COMPONENT_REF) 1942 { 1943 generate_bytecode_insns (TREE_OPERAND (lhs, 0), 1944 STACK_TARGET, state); 1945 offset = 1; 1946 } 1947 else if (TREE_CODE (lhs) == ARRAY_REF) 1948 { 
1949 generate_bytecode_insns (TREE_OPERAND(lhs, 0), 1950 STACK_TARGET, state); 1951 generate_bytecode_insns (TREE_OPERAND(lhs, 1), 1952 STACK_TARGET, state); 1953 offset = 2; 1954 } 1955 else 1956 offset = 0; 1957 generate_bytecode_insns (rhs, STACK_TARGET, state); 1958 if (target != IGNORE_TARGET) 1959 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state); 1960 exp = lhs; 1961 } 1962 /* FALLTHOUGH */ 1963 1964 finish_assignment: 1965 if (TREE_CODE (exp) == COMPONENT_REF) 1966 { 1967 tree field = TREE_OPERAND (exp, 1); 1968 if (! FIELD_STATIC (field)) 1969 NOTE_POP (1); 1970 field_op (field, 1971 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield, 1972 state); 1973 1974 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1); 1975 } 1976 else if (TREE_CODE (exp) == VAR_DECL 1977 || TREE_CODE (exp) == PARM_DECL) 1978 { 1979 if (FIELD_STATIC (exp)) 1980 { 1981 field_op (exp, OPCODE_putstatic, state); 1982 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1); 1983 } 1984 else 1985 emit_store (exp, state); 1986 } 1987 else if (TREE_CODE (exp) == ARRAY_REF) 1988 { 1989 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7); 1990 RESERVE(1); 1991 OP1 (jopcode); 1992 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 
4 : 3); 1993 } 1994 else 1995 fatal ("internal error (bad lhs to MODIFY_EXPR)"); 1996 break; 1997 case PLUS_EXPR: 1998 jopcode = OPCODE_iadd; 1999 goto binop; 2000 case MINUS_EXPR: 2001 jopcode = OPCODE_isub; 2002 goto binop; 2003 case MULT_EXPR: 2004 jopcode = OPCODE_imul; 2005 goto binop; 2006 case TRUNC_DIV_EXPR: 2007 case RDIV_EXPR: 2008 jopcode = OPCODE_idiv; 2009 goto binop; 2010 case TRUNC_MOD_EXPR: 2011 jopcode = OPCODE_irem; 2012 goto binop; 2013 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop; 2014 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop; 2015 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop; 2016 case TRUTH_AND_EXPR: 2017 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop; 2018 case TRUTH_OR_EXPR: 2019 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop; 2020 case TRUTH_XOR_EXPR: 2021 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop; 2022 binop: 2023 { 2024 tree arg0 = TREE_OPERAND (exp, 0); 2025 tree arg1 = TREE_OPERAND (exp, 1); 2026 jopcode += adjust_typed_op (type, 3); 2027 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR) 2028 { 2029 /* fold may (e.g) convert 2*x to x+x. */ 2030 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state); 2031 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state); 2032 } 2033 else 2034 { 2035 generate_bytecode_insns (arg0, target, state); 2036 generate_bytecode_insns (arg1, target, state); 2037 } 2038 /* For most binary operations, both operands and the result have the 2039 same type. Shift operations are different. Using arg1's type 2040 gets us the correct SP adjustment in all casesd. */ 2041 if (target == STACK_TARGET) 2042 emit_binop (jopcode, TREE_TYPE (arg1), state); 2043 break; 2044 } 2045 case TRUTH_NOT_EXPR: 2046 case BIT_NOT_EXPR: 2047 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state); 2048 if (target == STACK_TARGET) 2049 { 2050 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32; 2051 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? 
-1 : 1, state); 2052 RESERVE (2); 2053 if (is_long) 2054 OP1 (OPCODE_i2l); 2055 NOTE_PUSH (1 + is_long); 2056 OP1 (OPCODE_ixor + is_long); 2057 NOTE_POP (1 + is_long); 2058 } 2059 break; 2060 case NEGATE_EXPR: 2061 jopcode = OPCODE_ineg; 2062 jopcode += adjust_typed_op (type, 3); 2063 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state); 2064 if (target == STACK_TARGET) 2065 emit_unop (jopcode, type, state); 2066 break; 2067 case INSTANCEOF_EXPR: 2068 { 2069 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1)); 2070 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state); 2071 RESERVE (3); 2072 OP1 (OPCODE_instanceof); 2073 OP2 (index); 2074 } 2075 break; 2076 case CONVERT_EXPR: 2077 case NOP_EXPR: 2078 case FLOAT_EXPR: 2079 case FIX_TRUNC_EXPR: 2080 { 2081 tree src = TREE_OPERAND (exp, 0); 2082 tree src_type = TREE_TYPE (src); 2083 tree dst_type = TREE_TYPE (exp); 2084 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state); 2085 if (target == IGNORE_TARGET || src_type == dst_type) 2086 break; 2087 if (TREE_CODE (dst_type) == POINTER_TYPE) 2088 { 2089 if (TREE_CODE (exp) == CONVERT_EXPR) 2090 { 2091 int index = find_class_constant (&state->cpool, TREE_TYPE (dst_type)); 2092 RESERVE (3); 2093 OP1 (OPCODE_checkcast); 2094 OP2 (index); 2095 } 2096 } 2097 else /* Convert numeric types. */ 2098 { 2099 int wide_src = TYPE_PRECISION (src_type) > 32; 2100 int wide_dst = TYPE_PRECISION (dst_type) > 32; 2101 NOTE_POP (1 + wide_src); 2102 RESERVE (1); 2103 if (TREE_CODE (dst_type) == REAL_TYPE) 2104 { 2105 if (TREE_CODE (src_type) == REAL_TYPE) 2106 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f); 2107 else if (TYPE_PRECISION (src_type) == 64) 2108 OP1 (OPCODE_l2f + wide_dst); 2109 else 2110 OP1 (OPCODE_i2f + wide_dst); 2111 } 2112 else /* Convert to integral type. 
*/ 2113 { 2114 if (TREE_CODE (src_type) == REAL_TYPE) 2115 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src); 2116 else if (wide_dst) 2117 OP1 (OPCODE_i2l); 2118 else if (wide_src) 2119 OP1 (OPCODE_l2i); 2120 if (TYPE_PRECISION (dst_type) < 32) 2121 { 2122 RESERVE (1); 2123 /* Already converted to int, if needed. */ 2124 if (TYPE_PRECISION (dst_type) <= 8) 2125 OP1 (OPCODE_i2b); 2126 else if (TREE_UNSIGNED (dst_type)) 2127 OP1 (OPCODE_i2c); 2128 else 2129 OP1 (OPCODE_i2s); 2130 } 2131 } 2132 NOTE_PUSH (1 + wide_dst); 2133 } 2134 } 2135 break; 2136 2137 case CLEANUP_POINT_EXPR: 2138 { 2139 struct jcf_block *save_labeled_blocks = state->labeled_blocks; 2140 int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)); 2141 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state); 2142 if (target != IGNORE_TARGET) 2143 abort (); 2144 while (state->labeled_blocks != save_labeled_blocks) 2145 { 2146 struct jcf_block *finished_label = NULL; 2147 tree return_link; 2148 tree exception_type = build_pointer_type (throwable_type_node); 2149 tree exception_decl = build_decl (VAR_DECL, NULL_TREE, 2150 exception_type); 2151 struct jcf_block *end_label = get_jcf_label_here (state); 2152 struct jcf_block *label = state->labeled_blocks; 2153 struct jcf_handler *handler; 2154 tree cleanup = label->u.labeled_block; 2155 state->labeled_blocks = label->next; 2156 state->num_finalizers--; 2157 if (can_complete) 2158 { 2159 finished_label = gen_jcf_label (state); 2160 emit_jsr (label, state); 2161 emit_goto (finished_label, state); 2162 if (! CAN_COMPLETE_NORMALLY (cleanup)) 2163 can_complete = 0; 2164 } 2165 handler = alloc_handler (label->v.start_label, end_label, state); 2166 handler->type = NULL_TREE; 2167 localvar_alloc (exception_decl, state); 2168 NOTE_PUSH (1); 2169 emit_store (exception_decl, state); 2170 emit_jsr (label, state); 2171 emit_load (exception_decl, state); 2172 RESERVE (1); 2173 OP1 (OPCODE_athrow); 2174 NOTE_POP (1); 2175 2176 /* The finally block. 
*/ 2177 return_link = build_decl (VAR_DECL, NULL_TREE, 2178 return_address_type_node); 2179 define_jcf_label (label, state); 2180 NOTE_PUSH (1); 2181 localvar_alloc (return_link, state); 2182 emit_store (return_link, state); 2183 generate_bytecode_insns (cleanup, IGNORE_TARGET, state); 2184 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state); 2185 localvar_free (return_link, state); 2186 localvar_free (exception_decl, state); 2187 if (finished_label != NULL) 2188 define_jcf_label (finished_label, state); 2189 } 2190 } 2191 break; 2192 2193 case WITH_CLEANUP_EXPR: 2194 { 2195 struct jcf_block *label; 2196 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state); 2197 label = gen_jcf_label (state); 2198 label->pc = PENDING_CLEANUP_PC; 2199 label->next = state->labeled_blocks; 2200 state->labeled_blocks = label; 2201 state->num_finalizers++; 2202 label->u.labeled_block = TREE_OPERAND (exp, 2); 2203 label->v.start_label = get_jcf_label_here (state); 2204 if (target != IGNORE_TARGET) 2205 abort (); 2206 } 2207 break; 2208 2209 case TRY_EXPR: 2210 { 2211 tree try_clause = TREE_OPERAND (exp, 0); 2212 struct jcf_block *start_label = get_jcf_label_here (state); 2213 struct jcf_block *end_label; /* End of try clause. 
*/ 2214 struct jcf_block *finished_label = gen_jcf_label (state); 2215 tree clause = TREE_OPERAND (exp, 1); 2216 if (target != IGNORE_TARGET) 2217 abort (); 2218 generate_bytecode_insns (try_clause, IGNORE_TARGET, state); 2219 end_label = get_jcf_label_here (state); 2220 if (CAN_COMPLETE_NORMALLY (try_clause)) 2221 emit_goto (finished_label, state); 2222 while (clause != NULL_TREE) 2223 { 2224 tree catch_clause = TREE_OPERAND (clause, 0); 2225 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause); 2226 struct jcf_handler *handler = alloc_handler (start_label, end_label, state); 2227 if (exception_decl == NULL_TREE) 2228 handler->type = NULL_TREE; 2229 else 2230 handler->type = TREE_TYPE (TREE_TYPE (exception_decl)); 2231 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state); 2232 clause = TREE_CHAIN (clause); 2233 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE) 2234 emit_goto (finished_label, state); 2235 } 2236 define_jcf_label (finished_label, state); 2237 } 2238 break; 2239 case TRY_FINALLY_EXPR: 2240 { 2241 tree try_block = TREE_OPERAND (exp, 0); 2242 tree finally = TREE_OPERAND (exp, 1); 2243 struct jcf_block *finished_label = gen_jcf_label (state); 2244 struct jcf_block *finally_label = gen_jcf_label (state); 2245 struct jcf_block *start_label = get_jcf_label_here (state); 2246 tree return_link = build_decl (VAR_DECL, NULL_TREE, 2247 return_address_type_node); 2248 tree exception_type = build_pointer_type (throwable_type_node); 2249 tree exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type); 2250 struct jcf_handler *handler; 2251 2252 finally_label->pc = PENDING_CLEANUP_PC; 2253 finally_label->next = state->labeled_blocks; 2254 state->labeled_blocks = finally_label; 2255 state->num_finalizers++; 2256 2257 generate_bytecode_insns (try_block, target, state); 2258 if (state->labeled_blocks != finally_label) 2259 abort(); 2260 state->labeled_blocks = finally_label->next; 2261 emit_jsr (finally_label, state); 2262 if 
(CAN_COMPLETE_NORMALLY (try_block)) 2263 emit_goto (finished_label, state); 2264 2265 /* Handle exceptions. */ 2266 localvar_alloc (return_link, state); 2267 handler = alloc_handler (start_label, NULL_TREE, state); 2268 handler->end_label = handler->handler_label; 2269 handler->type = NULL_TREE; 2270 localvar_alloc (exception_decl, state); 2271 NOTE_PUSH (1); 2272 emit_store (exception_decl, state); 2273 emit_jsr (finally_label, state); 2274 emit_load (exception_decl, state); 2275 RESERVE (1); 2276 OP1 (OPCODE_athrow); 2277 NOTE_POP (1); 2278 localvar_free (exception_decl, state); 2279 2280 /* The finally block. First save return PC into return_link. */ 2281 define_jcf_label (finally_label, state); 2282 NOTE_PUSH (1); 2283 emit_store (return_link, state); 2284 2285 generate_bytecode_insns (finally, IGNORE_TARGET, state); 2286 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state); 2287 localvar_free (return_link, state); 2288 define_jcf_label (finished_label, state); 2289 } 2290 break; 2291 case THROW_EXPR: 2292 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state); 2293 RESERVE (1); 2294 OP1 (OPCODE_athrow); 2295 break; 2296 case NEW_ARRAY_INIT: 2297 { 2298 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); 2299 tree array_type = TREE_TYPE (TREE_TYPE (exp)); 2300 tree element_type = TYPE_ARRAY_ELEMENT (array_type); 2301 HOST_WIDE_INT length = java_array_type_length (array_type); 2302 if (target == IGNORE_TARGET) 2303 { 2304 for ( ; values != NULL_TREE; values = TREE_CHAIN (values)) 2305 generate_bytecode_insns (TREE_VALUE (values), target, state); 2306 break; 2307 } 2308 push_int_const (length, state); 2309 NOTE_PUSH (1); 2310 RESERVE (3); 2311 if (JPRIMITIVE_TYPE_P (element_type)) 2312 { 2313 int atype = encode_newarray_type (element_type); 2314 OP1 (OPCODE_newarray); 2315 OP1 (atype); 2316 } 2317 else 2318 { 2319 int index = find_class_constant (&state->cpool, 2320 TREE_TYPE (element_type)); 2321 OP1 (OPCODE_anewarray); 2322 OP2 
(index); 2323 } 2324 offset = 0; 2325 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7); 2326 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++) 2327 { 2328 int save_SP = state->code_SP; 2329 emit_dup (1, 0, state); 2330 push_int_const (offset, state); 2331 NOTE_PUSH (1); 2332 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state); 2333 RESERVE (1); 2334 OP1 (jopcode); 2335 state->code_SP = save_SP; 2336 } 2337 } 2338 break; 2339 case NEW_CLASS_EXPR: 2340 { 2341 tree class = TREE_TYPE (TREE_TYPE (exp)); 2342 int need_result = target != IGNORE_TARGET; 2343 int index = find_class_constant (&state->cpool, class); 2344 RESERVE (4); 2345 OP1 (OPCODE_new); 2346 OP2 (index); 2347 if (need_result) 2348 OP1 (OPCODE_dup); 2349 NOTE_PUSH (1 + need_result); 2350 } 2351 /* ... fall though ... */ 2352 case CALL_EXPR: 2353 { 2354 tree f = TREE_OPERAND (exp, 0); 2355 tree x = TREE_OPERAND (exp, 1); 2356 int save_SP = state->code_SP; 2357 int nargs; 2358 if (TREE_CODE (f) == ADDR_EXPR) 2359 f = TREE_OPERAND (f, 0); 2360 if (f == soft_newarray_node) 2361 { 2362 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x)); 2363 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)), 2364 STACK_TARGET, state); 2365 RESERVE (2); 2366 OP1 (OPCODE_newarray); 2367 OP1 (type_code); 2368 break; 2369 } 2370 else if (f == soft_multianewarray_node) 2371 { 2372 int ndims; 2373 int idim; 2374 int index = find_class_constant (&state->cpool, 2375 TREE_TYPE (TREE_TYPE (exp))); 2376 x = TREE_CHAIN (x); /* Skip class argument. 
*/ 2377 ndims = TREE_INT_CST_LOW (TREE_VALUE (x)); 2378 for (idim = ndims; --idim >= 0; ) 2379 { 2380 x = TREE_CHAIN (x); 2381 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state); 2382 } 2383 RESERVE (4); 2384 OP1 (OPCODE_multianewarray); 2385 OP2 (index); 2386 OP1 (ndims); 2387 break; 2388 } 2389 else if (f == soft_anewarray_node) 2390 { 2391 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp))); 2392 int index = find_class_constant (&state->cpool, TREE_TYPE (cl)); 2393 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state); 2394 RESERVE (3); 2395 OP1 (OPCODE_anewarray); 2396 OP2 (index); 2397 break; 2398 } 2399 else if (f == soft_monitorenter_node 2400 || f == soft_monitorexit_node 2401 || f == throw_node) 2402 { 2403 if (f == soft_monitorenter_node) 2404 op = OPCODE_monitorenter; 2405 else if (f == soft_monitorexit_node) 2406 op = OPCODE_monitorexit; 2407 else 2408 op = OPCODE_athrow; 2409 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state); 2410 RESERVE (1); 2411 OP1 (op); 2412 NOTE_POP (1); 2413 break; 2414 } 2415 else if (exp == soft_exceptioninfo_call_node) 2416 { 2417 NOTE_PUSH (1); /* Pushed by exception system. */ 2418 break; 2419 } 2420 for ( ; x != NULL_TREE; x = TREE_CHAIN (x)) 2421 { 2422 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state); 2423 } 2424 nargs = state->code_SP - save_SP; 2425 state->code_SP = save_SP; 2426 if (f == soft_fmod_node) 2427 { 2428 RESERVE (1); 2429 OP1 (OPCODE_drem); 2430 NOTE_PUSH (2); 2431 break; 2432 } 2433 if (TREE_CODE (exp) == NEW_CLASS_EXPR) 2434 NOTE_POP (1); /* Pop implicit this. 
 */
	if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
	  {
	    int index = find_methodref_index (&state->cpool, f);
	    int interface = 0;
	    RESERVE (5);
	    /* Pick the invocation opcode from the callee's flags.  */
	    if (METHOD_STATIC (f))
	      OP1 (OPCODE_invokestatic);
	    else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
		     || METHOD_PRIVATE (f))
	      OP1 (OPCODE_invokespecial);
	    else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f))))
	      {
		OP1 (OPCODE_invokeinterface);
		interface = 1;
	      }
	    else
	      OP1 (OPCODE_invokevirtual);
	    OP2 (index);
	    f = TREE_TYPE (TREE_TYPE (f));  /* F is now the return type.  */
	    if (TREE_CODE (f) != VOID_TYPE)
	      {
		int size = TYPE_IS_WIDE (f) ? 2 : 1;
		if (target == IGNORE_TARGET)
		  emit_pop (size, state);
		else
		  NOTE_PUSH (size);
	      }
	    /* invokeinterface is followed by an explicit argument-word
	       count and a reserved zero byte.  */
	    if (interface)
	      {
		OP1 (nargs);
		OP1 (0);
	      }
	    break;
	  }
      }
      /* fall through */
    notimpl:
    default:
      error("internal error - tree code not implemented: %s",
	    tree_code_name [(int) TREE_CODE (exp)]);
    }
}

/* Resolve all branch relocations in STATE and assign each block its
   final pc.  Two passes: the first computes the final start pc of
   every block (removing redundant gotos, adding tableswitch alignment
   padding, and deciding which branches need the 5-byte wide forms);
   the second rewrites each block's chunk, patching branch offsets.
   On return state->code_length is the total code size in bytes.  */

static void
perform_relocations (state)
     struct jcf_partial *state;
{
  struct jcf_block *block;
  struct jcf_relocation *reloc;
  int pc;
  int shrink;

  /* Before we start, the pc field of each block is an upper bound on
     the block's start pc (it may be less, if previous blocks need less
     than their maximum).

     The minimum size of each block is in the block's chunk->size. */

  /* First, figure out the actual locations of each block. */
  pc = 0;
  /* SHRINK counts bytes saved so far relative to the pessimistic
     upper-bound pcs computed earlier.  */
  shrink = 0;
  for (block = state->blocks;  block != NULL;  block = block->next)
    {
      int block_size = block->v.chunk->size;

      block->pc = pc;

      /* Optimize GOTO L; L: by getting rid of the redundant goto.
	 Assumes relocations are in reverse order.
 */
      reloc = block->u.relocations;
      /* NOTE(review): this dereferences block->next->pc — presumably a
	 block ending in a goto always has a successor block (the goto's
	 target must be defined after it); confirm the last block can
	 never carry such a relocation.  */
      while (reloc != NULL
	     && reloc->kind == OPCODE_goto_w
	     && reloc->label->pc == block->next->pc
	     && reloc->offset + 2 == block_size)
	{
	  reloc = reloc->next;
	  block->u.relocations = reloc;
	  block->v.chunk->size -= 3;
	  block_size -= 3;
	  shrink += 3;
	}

      for (reloc = block->u.relocations;  reloc != NULL;  reloc = reloc->next)
	{
	  if (reloc->kind == SWITCH_ALIGN_RELOC)
	    {
	      /* We assume this is the first relocation in this block,
		 so we know its final pc.  Pad to a 4-byte boundary.  */
	      int where = pc + reloc->offset;
	      int pad = ((where + 3) & ~3) - where;
	      block_size += pad;
	    }
	  else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
	    {
	      /* A branch that may need widening: widening goto/jsr to
		 its _w form adds 2 bytes; rewriting ifCOND as
		 ifNCOND + goto_w adds 5 (see the second pass below).  */
	      int delta = reloc->label->pc - (pc + reloc->offset - 1);
	      int expand = reloc->kind > 0 ? 2 : 5;

	      /* A forward target's upper-bound pc may still shrink.  */
	      if (delta > 0)
		delta -= shrink;
	      if (delta >= -32768 && delta <= 32767)
		{
		  /* Fits in 16 bits: no expansion needed; mark done.  */
		  shrink += expand;
		  reloc->kind = -1;
		}
	      else
		block_size += expand;
	    }
	}
      pc += block_size;
    }

  /* Second pass: rewrite each chunk with final offsets.  */
  for (block = state->blocks;  block != NULL;  block = block->next)
    {
      struct chunk *chunk = block->v.chunk;
      int old_size = chunk->size;
      int next_pc = block->next == NULL ? pc : block->next->pc;
      int new_size = next_pc - block->pc;
      unsigned char *new_ptr;
      unsigned char *old_buffer = chunk->data;
      unsigned char *old_ptr = old_buffer + old_size;
      if (new_size != old_size)
	{
	  chunk->data = (unsigned char *)
	    obstack_alloc (state->chunk_obstack, new_size);
	  chunk->size = new_size;
	}
      new_ptr = chunk->data + new_size;

      /* We do the relocations from back to front, because
	 the relocations are in reverse order. */
      for (reloc = block->u.relocations; ; reloc = reloc->next)
	{
	  /* new_ptr and old_ptr point into the old and new buffers,
	     respectively.  (If no relocations cause the buffer to
	     grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
	     The bytes at higher address have been copied and relocations
	     handled; those at lower addresses remain to process. */

	  /* Lower old index of piece to be copied with no relocation.
	     I.e. high index of the first piece that does need relocation. */
	  int start = reloc == NULL ? 0
	    : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
	    : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
	    ? reloc->offset + 4
	    : reloc->offset + 2;
	  int32 value;
	  int new_offset;
	  int n = (old_ptr - old_buffer) - start;
	  new_ptr -= n;
	  old_ptr -= n;
	  if (n > 0)
	    memcpy (new_ptr, old_ptr, n);
	  if (old_ptr == old_buffer)
	    break;

	  new_offset = new_ptr - chunk->data;
	  new_offset -= (reloc->kind == -1 ? 2 : 4);
	  if (reloc->kind == 0)
	    {
	      /* A 4-byte relative offset already stored in the code.  */
	      old_ptr -= 4;
	      value = GET_u4 (old_ptr);
	    }
	  else if (reloc->kind == BLOCK_START_RELOC)
	    {
	      /* Emit the label's absolute pc (value 0, offset from 0).  */
	      old_ptr -= 4;
	      value = 0;
	      new_offset = 0;
	    }
	  else if (reloc->kind == SWITCH_ALIGN_RELOC)
	    {
	      /* Insert zero padding so the switch payload is aligned.  */
	      int where = block->pc + reloc->offset;
	      int pad = ((where + 3) & ~3) - where;
	      while (--pad >= 0)
		*--new_ptr = 0;
	      continue;
	    }
	  else
	    {
	      /* A 2-byte relative offset (branch that fit).  */
	      old_ptr -= 2;
	      value = GET_u2 (old_ptr);
	    }
	  value += reloc->label->pc - (block->pc + new_offset);
	  *--new_ptr = (unsigned char) value;  value >>= 8;
	  *--new_ptr = (unsigned char) value;  value >>= 8;
	  if (reloc->kind != -1)
	    {
	      *--new_ptr = (unsigned char) value;  value >>= 8;
	      *--new_ptr = (unsigned char) value;
	    }
	  if (reloc->kind > BLOCK_START_RELOC)
	    {
	      /* Convert: OP TARGET to: OP_w TARGET;  (OP is goto or jsr).
		 reloc->kind holds the wide opcode byte to emit.  */
	      --old_ptr;
	      *--new_ptr = reloc->kind;
	    }
	  else if (reloc->kind < -1)
	    {
	      /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T:
		 (-reloc->kind is the negated condition's opcode).  */
	      --old_ptr;
	      *--new_ptr = OPCODE_goto_w;
	      *--new_ptr = 3;
	      *--new_ptr = 0;
	      *--new_ptr = - reloc->kind;
	    }
	}
      if (new_ptr != chunk->data)
	fatal ("internal error - perform_relocations");
    }
  state->code_length = pc;
}

/* Initialize STATE for emitting one class file, allocating all
   chunks from the obstack WORK.  */

static void
init_jcf_state (state, work)
     struct jcf_partial *state;
     struct obstack *work;
{
  state->chunk_obstack = work;
  state->first = state->chunk = NULL;
  CPOOL_INIT (&state->cpool);
  BUFFER_INIT (&state->localvars);
  BUFFER_INIT (&state->bytecode);
}

/* Reset the per-method parts of STATE before generating code
   for METHOD: buffers, labels, line numbers, local variables,
   stack-depth tracking, and exception handlers.  */

static void
init_jcf_method (state, method)
     struct jcf_partial *state;
     tree method;
{
  state->current_method = method;
  state->blocks = state->last_block = NULL;
  state->linenumber_count = 0;
  state->first_lvar = state->last_lvar = NULL;
  state->lvar_count = 0;
  state->labeled_blocks = NULL;
  state->code_length = 0;
  BUFFER_RESET (&state->bytecode);
  BUFFER_RESET (&state->localvars);
  state->code_SP = 0;
  state->code_SP_max = 0;
  state->handlers = NULL;
  state->last_handler = NULL;
  state->num_handlers = 0;
  state->num_finalizers = 0;
  state->return_value_decl = NULL_TREE;
}

/* Release everything STATE holds: the constant pool, and (via the
   obstack) state->first plus everything allocated after it.  */

static void
release_jcf_state (state)
     struct jcf_partial *state;
{
  CPOOL_FINISH (&state->cpool);
  obstack_free (state->chunk_obstack, state->first);
}

/* Generate and return a list of chunks containing the class CLAS
   in the .class file representation.  The list can be written to a
   .class file using write_chunks.  Allocate chunks from obstack WORK.
 */

static struct chunk *
generate_classfile (clas, state)
     tree clas;
     struct jcf_partial *state;
{
  struct chunk *cpool_chunk;
  char *source_file;
  char *ptr;
  int i;
  char *fields_count_ptr;
  int fields_count = 0;
  char *methods_count_ptr;
  int methods_count = 0;
  static tree SourceFile_node = NULL_TREE;
  tree part;
  int total_supers
    = clas == object_type_node ? 0
    : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));

  ptr = append_chunk (NULL, 8, state);
  PUT4 (0xCafeBabe);  /* Magic number */
  PUT2 (3);  /* Minor version */
  PUT2 (45);  /* Major version */

  /* Reserve an initially-empty chunk for the constant pool; its data
     is generated at the very end, once all constants are known.  */
  append_chunk (NULL, 0, state);
  cpool_chunk = state->chunk;

  /* Next allocate the chunk containing access_flags through fields_count. */
  if (clas == object_type_node)
    i = 10;
  else
    i = 8 + 2 * total_supers;
  ptr = append_chunk (NULL, i, state);
  i = get_access_flags (TYPE_NAME (clas));
  if (! (i & ACC_INTERFACE))
    i |= ACC_SUPER;
  PUT2 (i); /* access_flags */
  i = find_class_constant (&state->cpool, clas);  PUT2 (i);  /* this_class */
  if (clas == object_type_node)
    {
      PUT2(0);  /* super_class */
      PUT2(0);  /* interfaces_count */
    }
  else
    {
      /* Basetype 0 is the superclass; the rest are interfaces.  */
      tree basetypes = TYPE_BINFO_BASETYPES (clas);
      tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
      int j = find_class_constant (&state->cpool, base);
      PUT2 (j);  /* super_class */
      PUT2 (total_supers - 1);  /* interfaces_count */
      for (i = 1;  i < total_supers;  i++)
	{
	  base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
	  j = find_class_constant (&state->cpool, base);
	  PUT2 (j);
	}
    }
  /* Remember where fields_count goes; backpatched after the loop.  */
  fields_count_ptr = ptr;

  for (part = TYPE_FIELDS (clas);  part;  part = TREE_CHAIN (part))
    {
      int have_value;
      /* Skip compiler-generated fields (no name, or artificial).  */
      if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
	continue;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, DECL_NAME (part));  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (TREE_TYPE (part)));
      PUT2(i);
      /* A static field with an initializer gets a ConstantValue
	 attribute.  */
      have_value = DECL_INITIAL (part) != NULL_TREE && FIELD_STATIC (part);
      PUT2 (have_value);  /* attributes_count */
      if (have_value)
	{
	  tree init = DECL_INITIAL (part);
	  static tree ConstantValue_node = NULL_TREE;
	  ptr = append_chunk (NULL, 8, state);
	  if (ConstantValue_node == NULL_TREE)
	    ConstantValue_node = get_identifier ("ConstantValue");
	  i = find_utf8_constant (&state->cpool, ConstantValue_node);
	  PUT2 (i); /* attribute_name_index */
	  PUT4 (2); /* attribute_length */
	  i = find_constant_index (init, state);  PUT2 (i);
	}
      fields_count++;
    }
  ptr = fields_count_ptr;  PUT2 (fields_count);

  /* methods_count is backpatched once all methods are emitted.  */
  ptr = methods_count_ptr = append_chunk (NULL, 2, state);
  PUT2 (0);

  for (part = TYPE_METHODS (clas);  part;  part = TREE_CHAIN (part))
    {
      struct jcf_block *block;
      tree function_body = DECL_FUNCTION_BODY (part);
      tree body = function_body == NULL_TREE ? NULL_TREE
	: BLOCK_EXPR_BODY (function_body);
      tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
	: DECL_NAME (part);
      tree type = TREE_TYPE (part);
      tree save_function = current_function_decl;
      current_function_decl = part;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (type));
      PUT2 (i);
      i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
      PUT2 (i);   /* attributes_count */
      if (body != NULL_TREE)
	{
	  int code_attributes_count = 0;
	  static tree Code_node = NULL_TREE;
	  tree t;
	  char *attr_len_ptr;
	  struct jcf_handler *handler;
	  if (Code_node == NULL_TREE)
	    Code_node = get_identifier ("Code");
	  /* 14 bytes: attribute_name_index(2), attribute_length(4),
	     max_stack(2), max_locals(2), code_length(4).  All but the
	     name index are backpatched after code generation.  */
	  ptr = append_chunk (NULL, 14, state);
	  i = find_utf8_constant (&state->cpool, Code_node);  PUT2 (i);
	  attr_len_ptr = ptr;
	  init_jcf_method (state, part);
	  get_jcf_label_here (state);  /* Force a first block.
 */
	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
	    localvar_alloc (t, state);
	  generate_bytecode_insns (body, IGNORE_TARGET, state);
	  if (CAN_COMPLETE_NORMALLY (body))
	    {
	      /* Only a void method may fall off the end.  */
	      if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
		abort();
	      RESERVE (1);
	      OP1 (OPCODE_return);
	    }
	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
	    localvar_free (t, state);
	  if (state->return_value_decl != NULL_TREE)
	    localvar_free (state->return_value_decl, state);
	  finish_jcf_block (state);
	  perform_relocations (state);

	  ptr = attr_len_ptr;
	  /* attribute_length = max_stack(2) + max_locals(2) +
	     code_length field(4) + the code itself +
	     exception_table_length(2) + 8 per handler +
	     attributes_count(2).  */
	  i = 8 + state->code_length + 4 + 8 * state->num_handlers;
	  if (state->linenumber_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 4 * state->linenumber_count;
	    }
	  if (state->lvar_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 10 * state->lvar_count;
	    }
	  PUT4 (i); /* attribute_length */
	  PUT2 (state->code_SP_max);  /* max_stack */
	  PUT2 (localvar_max);  /* max_locals */
	  PUT4 (state->code_length);

	  /* Emit the exception table. */
	  ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
	  PUT2 (state->num_handlers);  /* exception_table_length */
	  handler = state->handlers;
	  for (; handler != NULL;  handler = handler->next)
	    {
	      int type_index;
	      PUT2 (handler->start_label->pc);
	      PUT2 (handler->end_label->pc);
	      PUT2 (handler->handler_label->pc);
	      /* catch_type 0 means "catch anything" (finally).  */
	      if (handler->type == NULL_TREE)
		type_index = 0;
	      else
		type_index = find_class_constant (&state->cpool,
						  handler->type);
	      PUT2 (type_index);
	    }

	  ptr = append_chunk (NULL, 2, state);
	  PUT2 (code_attributes_count);

	  /* Write the LineNumberTable attribute.
 */
	  if (state->linenumber_count > 0)
	    {
	      static tree LineNumberTable_node = NULL_TREE;
	      ptr = append_chunk (NULL, 8 + 4 * state->linenumber_count, state);
	      if (LineNumberTable_node == NULL_TREE)
		LineNumberTable_node = get_identifier ("LineNumberTable");
	      i = find_utf8_constant (&state->cpool, LineNumberTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2+4*state->linenumber_count;  PUT4(i); /* attribute_length */
	      i = state->linenumber_count;  PUT2 (i);
	      for (block = state->blocks;  block != NULL;  block = block->next)
		{
		  int line = block->linenumber;
		  if (line > 0)
		    {
		      PUT2 (block->pc);
		      PUT2 (line);
		    }
		}
	    }

	  /* Write the LocalVariableTable attribute. */
	  if (state->lvar_count > 0)
	    {
	      static tree LocalVariableTable_node = NULL_TREE;
	      struct localvar_info *lvar = state->first_lvar;
	      ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
	      if (LocalVariableTable_node == NULL_TREE)
		LocalVariableTable_node = get_identifier("LocalVariableTable");
	      i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2 + 10 * state->lvar_count;  PUT4 (i); /* attribute_length */
	      i = state->lvar_count;  PUT2 (i);
	      for ( ; lvar != NULL;  lvar = lvar->next)
		{
		  tree name = DECL_NAME (lvar->decl);
		  tree sig = build_java_signature (TREE_TYPE (lvar->decl));
		  i = lvar->start_label->pc;  PUT2 (i);
		  i = lvar->end_label->pc - i;  PUT2 (i);  /* length */
		  i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
		  i = find_utf8_constant (&state->cpool, sig);  PUT2 (i);
		  i = DECL_LOCAL_INDEX (lvar->decl);  PUT2 (i);
		}
	    }
	}
      /* Write the Exceptions (declared throws) attribute.  */
      if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
	{
	  tree t = DECL_FUNCTION_THROWS (part);
	  int throws_count = list_length (t);
	  static tree Exceptions_node = NULL_TREE;
	  if (Exceptions_node == NULL_TREE)
	    Exceptions_node =
	      get_identifier ("Exceptions");
	  ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
	  i = find_utf8_constant (&state->cpool, Exceptions_node);
	  PUT2 (i);  /* attribute_name_index */
	  i = 2 + 2 * throws_count;  PUT4(i); /* attribute_length */
	  i = throws_count;  PUT2 (i);
	  for (;  t != NULL_TREE;  t = TREE_CHAIN (t))
	    {
	      i = find_class_constant (&state->cpool, TREE_VALUE (t));
	      PUT2 (i);
	    }
	}
      methods_count++;
      current_function_decl = save_function;
    }
  ptr = methods_count_ptr;  PUT2 (methods_count);

  /* Use only the basename of the source file; both '/' and '\\'
     are treated as directory separators.  */
  source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
  for (ptr = source_file;  ;  ptr++)
    {
      char ch = *ptr;
      if (ch == '\0')
	break;
      if (ch == '/' || ch == '\\')
	source_file = ptr+1;
    }
  ptr = append_chunk (NULL, 10, state);
  PUT2 (1);  /* attributes_count */

  /* generate the SourceFile attribute. */
  if (SourceFile_node == NULL_TREE)
    SourceFile_node = get_identifier ("SourceFile");
  i = find_utf8_constant (&state->cpool, SourceFile_node);
  PUT2 (i);  /* attribute_name_index */
  PUT4 (2);  /* attribute_length */
  i = find_utf8_constant (&state->cpool, get_identifier (source_file));
  PUT2 (i);

  /* Now finally generate the contents of the constant pool chunk. */
  i = count_constant_pool_bytes (&state->cpool);
  ptr = obstack_alloc (state->chunk_obstack, i);
  cpool_chunk->data = ptr;
  cpool_chunk->size = i;
  write_constant_pool (&state->cpool, ptr, i);
  return state->first;
}

/* Return a freshly xmalloc'd file name for writing class CLAS:
   under jcf_write_base_directory if set, otherwise in the same
   directory as the .java source file.  Missing subdirectories
   below the base directory are created as needed.  */

static char *
make_class_file_name (clas)
     tree clas;
{
  const char *dname, *slash;
  char *cname, *r;
  struct stat sb;

  /* Map the qualified class name to a path: presumably each '.'
     becomes DIR_SEPARATOR and ".class" is appended — see
     identifier_subst.  */
  cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
						"", '.', DIR_SEPARATOR,
						".class"));
  if (jcf_write_base_directory == NULL)
    {
      /* Make sure we put the class file into the .java file's
	 directory, and not into some subdirectory thereof.
*/ 2984 char *t; 2985 dname = DECL_SOURCE_FILE (TYPE_NAME (clas)); 2986 slash = strrchr (dname, DIR_SEPARATOR); 2987 if (! slash) 2988 { 2989 dname = "."; 2990 slash = dname + 1; 2991 } 2992 t = strrchr (cname, DIR_SEPARATOR); 2993 if (t) 2994 cname = t + 1; 2995 } 2996 else 2997 { 2998 dname = jcf_write_base_directory; 2999 slash = dname + strlen (dname); 3000 } 3001 3002 r = xmalloc (slash - dname + strlen (cname) + 2); 3003 strncpy (r, dname, slash - dname); 3004 r[slash - dname] = DIR_SEPARATOR; 3005 strcpy (&r[slash - dname + 1], cname); 3006 3007 /* We try to make new directories when we need them. We only do 3008 this for directories which "might not" exist. For instance, we 3009 assume the `-d' directory exists, but we don't assume that any 3010 subdirectory below it exists. It might be worthwhile to keep 3011 track of which directories we've created to avoid gratuitous 3012 stat()s. */ 3013 dname = r + (slash - dname) + 1; 3014 while (1) 3015 { 3016 cname = strchr (dname, DIR_SEPARATOR); 3017 if (cname == NULL) 3018 break; 3019 *cname = '\0'; 3020 if (stat (r, &sb) == -1) 3021 { 3022 /* Try to make it. */ 3023 if (mkdir (r, 0755) == -1) 3024 { 3025 fatal ("failed to create directory `%s'", r); 3026 free (r); 3027 return NULL; 3028 } 3029 } 3030 *cname = DIR_SEPARATOR; 3031 /* Skip consecutive separators. */ 3032 for (dname = cname + 1; *dname && *dname == DIR_SEPARATOR; ++dname) 3033 ; 3034 } 3035 3036 return r; 3037} 3038 3039/* Write out the contens of a class (RECORD_TYPE) CLAS, as a .class file. 3040 The output .class file name is make_class_file_name(CLAS). 
*/ 3041 3042void 3043write_classfile (clas) 3044 tree clas; 3045{ 3046 struct obstack *work = &temporary_obstack; 3047 struct jcf_partial state[1]; 3048 char *class_file_name = make_class_file_name (clas); 3049 struct chunk *chunks; 3050 3051 if (class_file_name != NULL) 3052 { 3053 FILE* stream = fopen (class_file_name, "wb"); 3054 if (stream == NULL) 3055 fatal ("failed to open `%s' for writing", class_file_name); 3056 jcf_dependency_add_target (class_file_name); 3057 init_jcf_state (state, work); 3058 chunks = generate_classfile (clas, state); 3059 write_chunks (stream, chunks); 3060 if (fclose (stream)) 3061 fatal ("failed to close after writing `%s'", class_file_name); 3062 free (class_file_name); 3063 } 3064 release_jcf_state (state); 3065} 3066 3067/* TODO: 3068 string concatenation 3069 synchronized statement 3070 */ 3071