rewriter.cpp revision 3602:da91efe96a93
/*
 * Copyright (c) 1998, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "interpreter/bytecodes.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/rewriter.hpp"
#include "memory/gcLocker.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/generateOopMap.hpp"
#include "oops/objArrayOop.hpp"
#include "oops/oop.inline.hpp"
#include "prims/methodComparator.hpp"
#include "prims/methodHandles.hpp"

// Computes a CPC map (new_index -> original_index) for constant pool entries
// that are referred to by the interpreter at runtime via the constant pool cache.
// Also computes a CP map (original_index -> new_index).
// Marks entries in CP which require additional processing.
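//
// Illustrative sketch (made-up indices, not real pool contents): if CP
// entries 3 and 7 were the only Methodrefs, the resulting maps would be
// roughly
//   CPC map: {0 -> 3, 1 -> 7}   (new_index -> original_index)
//   CP map:  {3 -> 0, 7 -> 1}   (original_index -> new_index)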
void Rewriter::compute_index_maps() {
  const int length  = _pool->length();
  init_maps(length);
  bool saw_mh_symbol = false;
  for (int i = 0; i < length; i++) {
    int tag = _pool->tag_at(i).value();
    switch (tag) {
      case JVM_CONSTANT_InterfaceMethodref:
      case JVM_CONSTANT_Fieldref          : // fall through
      case JVM_CONSTANT_Methodref         : // fall through
        add_cp_cache_entry(i);
        break;
      case JVM_CONSTANT_String:
      case JVM_CONSTANT_Object:
      case JVM_CONSTANT_MethodHandle      : // fall through
      case JVM_CONSTANT_MethodType        : // fall through
        add_resolved_references_entry(i);
        break;
      case JVM_CONSTANT_Utf8:
        if (_pool->symbol_at(i) == vmSymbols::java_lang_invoke_MethodHandle())
          saw_mh_symbol = true;
        break;
    }
  }

  // Record limits of resolved reference map for constant pool cache indices
  record_map_limits();

  guarantee((int)_cp_cache_map.length()-1 <= (int)((u2)-1),
            "all cp cache indexes fit in a u2");

  if (saw_mh_symbol)
    _method_handle_invokers.initialize(length, (int)0);
}

// Unrewrite the bytecodes if an error occurs.
void Rewriter::restore_bytecodes() {
  int len = _methods->length();

  for (int i = len-1; i >= 0; i--) {
    Method* method = _methods->at(i);
    scan_method(method, true);
  }
}
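// Note: scan_method(method, true) walks the same bytecodes as the forward
// pass in the constructor and puts each rewritten operand back into
// classfile order, so a partially rewritten class can be safely discarded.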

// Creates a constant pool cache given a CPC map
void Rewriter::make_constant_pool_cache(TRAPS) {
  const int length = _cp_cache_map.length();
  ClassLoaderData* loader_data = _pool->pool_holder()->class_loader_data();
  ConstantPoolCache* cache =
      ConstantPoolCache::allocate(loader_data, length, CHECK);

  // initialize object cache in constant pool
  _pool->initialize_resolved_references(loader_data, _resolved_references_map,
                                        _resolved_reference_limit,
                                        CHECK);

  No_Safepoint_Verifier nsv;
  cache->initialize(_cp_cache_map, _invokedynamic_references_map);
  _pool->set_cache(cache);
  cache->set_constant_pool(_pool());
}


// The new finalization semantics says that registration of
// finalizable objects must be performed on successful return from the
// Object.<init> constructor.  We could implement this trivially if
// <init> were never rewritten but since JVMTI allows this to occur, a
// more complicated solution is required.  A special return bytecode
// is used only by Object.<init> to signal the finalization
// registration point.  Additionally local 0 must be preserved so it's
// available to pass to the registration function.  For simplicity we
// require that local 0 is never overwritten so it's available as an
// argument for registration.
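//
// Illustrative sketch (not a real disassembly): the trailing
//     return
// of Object.<init> is rewritten to
//     _return_register_finalizer
// while any istore/lstore/fstore/dstore/astore targeting local 0 makes
// the rewrite fail with IncompatibleClassChangeError, as implemented below.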

void Rewriter::rewrite_Object_init(methodHandle method, TRAPS) {
  RawBytecodeStream bcs(method);
  while (!bcs.is_last_bytecode()) {
    Bytecodes::Code opcode = bcs.raw_next();
    switch (opcode) {
      case Bytecodes::_return: *bcs.bcp() = Bytecodes::_return_register_finalizer; break;

      case Bytecodes::_istore:
      case Bytecodes::_lstore:
      case Bytecodes::_fstore:
      case Bytecodes::_dstore:
      case Bytecodes::_astore:
        if (bcs.get_index() != 0) continue;

        // fall through
      case Bytecodes::_istore_0:
      case Bytecodes::_lstore_0:
      case Bytecodes::_fstore_0:
      case Bytecodes::_dstore_0:
      case Bytecodes::_astore_0:
        THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(),
                  "can't overwrite local 0 in Object.<init>");
        break;
    }
  }
}


// Rewrite a classfile-order CP index into a native-order CPC index.
void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
  address p = bcp + offset;
  if (!reverse) {
    int  cp_index    = Bytes::get_Java_u2(p);
    int  cache_index = cp_entry_to_cp_cache(cp_index);
    Bytes::put_native_u2(p, cache_index);
    if (!_method_handle_invokers.is_empty())
      maybe_rewrite_invokehandle(p - 1, cp_index, cache_index, reverse);
  } else {
    int cache_index = Bytes::get_native_u2(p);
    int pool_index = cp_cache_entry_pool_index(cache_index);
    Bytes::put_Java_u2(p, pool_index);
    if (!_method_handle_invokers.is_empty())
      maybe_rewrite_invokehandle(p - 1, pool_index, cache_index, reverse);
  }
}
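// Sketch of the operand change (assuming a little-endian host; values are
// illustrative only): a "getfield #5" whose entry maps to cache slot 2 has
// its big-endian operand bytes 0x00 0x05 overwritten with the native-order
// cache index bytes 0x02 0x00; the reverse pass restores the original.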


// Adjust the invocation bytecode for a signature-polymorphic method (MethodHandle.invoke, etc.)
void Rewriter::maybe_rewrite_invokehandle(address opc, int cp_index, int cache_index, bool reverse) {
  if (!reverse) {
    if ((*opc) == (u1)Bytecodes::_invokevirtual ||
        // allow invokespecial as an alias, although it would be very odd:
        (*opc) == (u1)Bytecodes::_invokespecial) {
      assert(_pool->tag_at(cp_index).is_method(), "wrong index");
      // Determine whether this is a signature-polymorphic method.
      if (cp_index >= _method_handle_invokers.length())  return;
      int status = _method_handle_invokers[cp_index];
      assert(status >= -1 && status <= 1, "oob tri-state");
      if (status == 0) {
        if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_MethodHandle() &&
            MethodHandles::is_signature_polymorphic_name(SystemDictionary::MethodHandle_klass(),
                                                         _pool->name_ref_at(cp_index))) {
          // we may need a resolved_refs entry for the appendix
          add_invokedynamic_resolved_references_entry(cp_index, cache_index);
          status = +1;
        } else {
          status = -1;
        }
        _method_handle_invokers[cp_index] = status;
      }
      // We use a special internal bytecode for such methods (if non-static).
      // The basic reason for this is that such methods need an extra "appendix" argument
      // to transmit the call site's intended call type.
      if (status > 0) {
        (*opc) = (u1)Bytecodes::_invokehandle;
      }
    }
  } else {
    // Do not need to look at cp_index.
    if ((*opc) == (u1)Bytecodes::_invokehandle) {
      (*opc) = (u1)Bytecodes::_invokevirtual;
      // Ignore corner case of original _invokespecial instruction.
      // This is safe because (a) the signature polymorphic method was final, and
      // (b) the implementation of MethodHandle will not call invokespecial on it.
    }
  }
}
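// Note on the tri-state cache above: _method_handle_invokers[cp_index] starts
// at 0 (not yet examined) and is latched to +1 for a signature-polymorphic
// name on java.lang.invoke.MethodHandle (rewritten to _invokehandle) or -1
// for any other method, so each CP entry is classified at most once.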


void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
  address p = bcp + offset;
  assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
  if (!reverse) {
    int cp_index = Bytes::get_Java_u2(p);
    int cache_index = add_invokedynamic_cp_cache_entry(cp_index);
    add_invokedynamic_resolved_references_entry(cp_index, cache_index);
    // Replace the trailing four bytes with a CPC index for the dynamic
    // call site.  Unlike other CPC entries, there is one per bytecode,
    // not just one per distinct CP entry.  In other words, the
    // CPC-to-CP relation is many-to-one for invokedynamic entries.
    // This means we must use a larger index size than u2 to address
    // all these entries.  That is the main reason invokedynamic
    // must have a five-byte instruction format.  (Of course, other JVM
    // implementations can use the bytes for other purposes.)
    Bytes::put_native_u4(p, ConstantPool::encode_invokedynamic_index(cache_index));
    // Note: We use native_u4 format exclusively for 4-byte indexes.
  } else {
    // callsite index
    int cache_index = ConstantPool::decode_invokedynamic_index(
                        Bytes::get_native_u4(p));
    int cp_index = cp_cache_entry_pool_index(cache_index);
    assert(_pool->tag_at(cp_index).is_invoke_dynamic(), "wrong index");
    // zero out 4 bytes
    Bytes::put_Java_u4(p, 0);
    Bytes::put_Java_u2(p, cp_index);
  }
}
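// Sketch of the five-byte instruction (operand layout only, values vary):
//   before: invokedynamic  u2 cp_index  0 0          (Java byte order)
//   after:  invokedynamic  native u4 encoded cache_index
// The reverse branch zeroes all four operand bytes, then rewrites the first
// two with the original u2 cp_index.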


// Rewrite some ldc bytecodes to _fast_aldc
void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
                                 bool reverse) {
  if (!reverse) {
    assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
    address p = bcp + offset;
    int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
    constantTag tag = _pool->tag_at(cp_index).value();
    if (tag.is_method_handle() || tag.is_method_type() || tag.is_string() || tag.is_object()) {
      int ref_index = cp_entry_to_resolved_references(cp_index);
      if (is_wide) {
        (*bcp) = Bytecodes::_fast_aldc_w;
        assert(ref_index == (u2)ref_index, "index overflow");
        Bytes::put_native_u2(p, ref_index);
      } else {
        (*bcp) = Bytecodes::_fast_aldc;
        assert(ref_index == (u1)ref_index, "index overflow");
        (*p) = (u1)ref_index;
      }
    }
  } else {
    Bytecodes::Code rewritten_bc =
              (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc);
    if ((*bcp) == rewritten_bc) {
      address p = bcp + offset;
      int ref_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
      int pool_index = resolved_references_entry_to_pool_index(ref_index);
      if (is_wide) {
        (*bcp) = Bytecodes::_ldc_w;
        assert(pool_index == (u2)pool_index, "index overflow");
        Bytes::put_Java_u2(p, pool_index);
      } else {
        (*bcp) = Bytecodes::_ldc;
        assert(pool_index == (u1)pool_index, "index overflow");
        (*p) = (u1)pool_index;
      }
    }
  }
}
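// Sketch (made-up index): "ldc #8" where entry 8 is a String becomes
// "_fast_aldc <ref>", whose operand indexes the resolved-references array
// instead of the constant pool; ldc of plain int/float constants is left
// untouched by the tag check above.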


// Rewrites a method given the index_map information
void Rewriter::scan_method(Method* method, bool reverse) {

  int nof_jsrs = 0;
  bool has_monitor_bytecodes = false;

  {
    // We cannot tolerate a GC in this block, because we've
    // cached the bytecodes in 'code_base'. If the Method*
    // moves, the bytecodes will also move.
    No_Safepoint_Verifier nsv;
    Bytecodes::Code c;

    // Bytecodes and their length
    const address code_base = method->code_base();
    const int code_length = method->code_size();

    int bc_length;
    for (int bci = 0; bci < code_length; bci += bc_length) {
      address bcp = code_base + bci;
      int prefix_length = 0;
      c = (Bytecodes::Code)(*bcp);

      // Since we have the code, see if we can get the length
      // directly. Some more complicated bytecodes will report
      // a length of zero, meaning we need to make another method
      // call to calculate the length.
      bc_length = Bytecodes::length_for(c);
      if (bc_length == 0) {
        bc_length = Bytecodes::length_at(method, bcp);

        // length_at will put us at the bytecode after the one modified
        // by 'wide'. We don't currently examine any of the bytecodes
        // modified by wide, but in case we do in the future...
        if (c == Bytecodes::_wide) {
          prefix_length = 1;
          c = (Bytecodes::Code)bcp[1];
        }
      }

      assert(bc_length != 0, "impossible bytecode length");

      switch (c) {
        case Bytecodes::_lookupswitch   : {
#ifndef CC_INTERP
          Bytecode_lookupswitch bc(method, bcp);
          (*bcp) = (
            bc.number_of_pairs() < BinarySwitchThreshold
            ? Bytecodes::_fast_linearswitch
            : Bytecodes::_fast_binaryswitch
          );
#endif
          break;
        }
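        // Note: BinarySwitchThreshold is a VM flag (its default is believed
        // to be 5; see globals.hpp to confirm) below which a linear scan of
        // the match pairs beats a binary search.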
        case Bytecodes::_fast_linearswitch:
        case Bytecodes::_fast_binaryswitch: {
#ifndef CC_INTERP
          (*bcp) = Bytecodes::_lookupswitch;
#endif
          break;
        }
        case Bytecodes::_getstatic      : // fall through
        case Bytecodes::_putstatic      : // fall through
        case Bytecodes::_getfield       : // fall through
        case Bytecodes::_putfield       : // fall through
        case Bytecodes::_invokevirtual  : // fall through
        case Bytecodes::_invokespecial  : // fall through
        case Bytecodes::_invokestatic   :
        case Bytecodes::_invokeinterface:
        case Bytecodes::_invokehandle   : // if reverse=true
          rewrite_member_reference(bcp, prefix_length+1, reverse);
          break;
        case Bytecodes::_invokedynamic:
          rewrite_invokedynamic(bcp, prefix_length+1, reverse);
          break;
        case Bytecodes::_ldc:
        case Bytecodes::_fast_aldc:  // if reverse=true
          maybe_rewrite_ldc(bcp, prefix_length+1, false, reverse);
          break;
        case Bytecodes::_ldc_w:
        case Bytecodes::_fast_aldc_w:  // if reverse=true
          maybe_rewrite_ldc(bcp, prefix_length+1, true, reverse);
          break;
        case Bytecodes::_jsr            : // fall through
        case Bytecodes::_jsr_w          : nof_jsrs++;                   break;
        case Bytecodes::_monitorenter   : // fall through
        case Bytecodes::_monitorexit    : has_monitor_bytecodes = true; break;
      }
    }
  }

  // Update access flags
  if (has_monitor_bytecodes) {
    method->set_has_monitor_bytecodes();
  }

  // The presence of a jsr bytecode implies that the method might potentially
  // have to be rewritten, so we run the oopMapGenerator on the method
  if (nof_jsrs > 0) {
    method->set_has_jsrs();
    // Second pass will revisit this method.
    assert(method->has_jsrs(), "didn't we just set this?");
  }
}

// After constant pool is created, revisit methods containing jsrs.
methodHandle Rewriter::rewrite_jsrs(methodHandle method, TRAPS) {
  ResourceMark rm(THREAD);
  ResolveOopMapConflicts romc(method);
  methodHandle original_method = method;
  method = romc.do_potential_rewrite(CHECK_(methodHandle()));
  // Update monitor matching info.
  if (romc.monitor_safe()) {
    method->set_guaranteed_monitor_matching();
  }

  return method;
}
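// Note: do_potential_rewrite may return a brand-new Method* (e.g. with jsr
// subroutines expanded to resolve oop map conflicts), which is why the
// caller in relocate_and_link stores the result back into the methods array.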

void Rewriter::rewrite(instanceKlassHandle klass, TRAPS) {
  ResourceMark rm(THREAD);
  Rewriter     rw(klass, klass->constants(), klass->methods(), CHECK);
  // (That's all, folks.)
}
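// Usage note (an assumption from surrounding code, worth verifying against
// instanceKlass linking in this era): this entry point runs once per class
// during linking, before any of its methods can execute, so the interpreter
// only ever sees rewritten bytecodes unless an error forced
// restore_bytecodes().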


void Rewriter::rewrite(instanceKlassHandle klass, constantPoolHandle cpool, Array<Method*>* methods, TRAPS) {
  ResourceMark rm(THREAD);
  Rewriter     rw(klass, cpool, methods, CHECK);
  // (That's all, folks.)
}


Rewriter::Rewriter(instanceKlassHandle klass, constantPoolHandle cpool, Array<Method*>* methods, TRAPS)
  : _klass(klass),
    _pool(cpool),
    _methods(methods)
{
  assert(_pool->cache() == NULL, "constant pool cache must not be set yet");

  // determine index maps for Method* rewriting
  compute_index_maps();

  if (RegisterFinalizersAtInit && _klass->name() == vmSymbols::java_lang_Object()) {
    bool did_rewrite = false;
    int i = _methods->length();
    while (i-- > 0) {
      Method* method = _methods->at(i);
      if (method->intrinsic_id() == vmIntrinsics::_Object_init) {
        // rewrite the return bytecodes of Object.<init> to register the
        // object for finalization if needed.
        methodHandle m(THREAD, method);
        rewrite_Object_init(m, CHECK);
        did_rewrite = true;
        break;
      }
    }
    assert(did_rewrite, "must find Object::<init> to rewrite it");
  }

  // rewrite methods, in two passes
  int len = _methods->length();

  for (int i = len-1; i >= 0; i--) {
    Method* method = _methods->at(i);
    scan_method(method);
  }

  // allocate constant pool cache, now that we've seen all the bytecodes
  make_constant_pool_cache(THREAD);

  // Restore bytecodes to their unrewritten state if there are exceptions
  // rewriting bytecodes or allocating the cpCache
  if (HAS_PENDING_EXCEPTION) {
    restore_bytecodes();
    return;
  }
}

// Relocate jsr/rets in a method.  This can't be done with the rewriter
// stage because it can throw other exceptions, leaving the bytecodes
// pointing at constant pool cache entries.
// Link and check jvmti dependencies while we're iterating over the methods.
// JSR292 code calls with a different set of methods, so two entry points.
void Rewriter::relocate_and_link(instanceKlassHandle this_oop, TRAPS) {
  relocate_and_link(this_oop, this_oop->methods(), THREAD);
}

void Rewriter::relocate_and_link(instanceKlassHandle this_oop,
                                 Array<Method*>* methods, TRAPS) {
  int len = methods->length();
  for (int i = len-1; i >= 0; i--) {
    methodHandle m(THREAD, methods->at(i));

    if (m->has_jsrs()) {
      m = rewrite_jsrs(m, CHECK);
      // Method might have gotten rewritten.
      methods->at_put(i, m());
    }

    // Set up method entry points for compiler and interpreter.
    m->link_method(m, CHECK);

    // This is for JVMTI and unrelated to relocator but the last thing we do
#ifdef ASSERT
    if (StressMethodComparator) {
      static int nmc = 0;
      for (int j = i; j >= 0 && j >= i-4; j--) {
        if ((++nmc % 1000) == 0)  tty->print_cr("Have run MethodComparator %d times...", nmc);
        bool z = MethodComparator::methods_EMCP(m(),
                   methods->at(j));
        if (j == i && !z) {
          tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
          assert(z, "method must compare equal to itself");
        }
      }
    }
#endif //ASSERT
  }
}