method.cpp revision 13370:731370f39fcd
1/*
2 * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "classfile/metadataOnStackMark.hpp"
27#include "classfile/systemDictionary.hpp"
28#include "code/codeCache.hpp"
29#include "code/debugInfoRec.hpp"
30#include "gc/shared/collectedHeap.inline.hpp"
31#include "gc/shared/gcLocker.hpp"
32#include "gc/shared/generation.hpp"
33#include "interpreter/bytecodeStream.hpp"
34#include "interpreter/bytecodeTracer.hpp"
35#include "interpreter/bytecodes.hpp"
36#include "interpreter/interpreter.hpp"
37#include "interpreter/oopMapCache.hpp"
38#include "memory/heapInspection.hpp"
39#include "memory/metadataFactory.hpp"
40#include "memory/metaspaceClosure.hpp"
41#include "memory/metaspaceShared.hpp"
42#include "memory/oopFactory.hpp"
43#include "memory/resourceArea.hpp"
44#include "oops/constMethod.hpp"
45#include "oops/method.hpp"
46#include "oops/methodData.hpp"
47#include "oops/objArrayOop.inline.hpp"
48#include "oops/oop.inline.hpp"
49#include "oops/symbol.hpp"
50#include "prims/jvmtiExport.hpp"
51#include "prims/methodHandles.hpp"
52#include "prims/nativeLookup.hpp"
53#include "runtime/arguments.hpp"
54#include "runtime/compilationPolicy.hpp"
55#include "runtime/frame.inline.hpp"
56#include "runtime/handles.inline.hpp"
57#include "runtime/init.hpp"
58#include "runtime/orderAccess.inline.hpp"
59#include "runtime/relocator.hpp"
60#include "runtime/sharedRuntime.hpp"
61#include "runtime/signature.hpp"
62#include "utilities/align.hpp"
63#include "utilities/quickSort.hpp"
64#include "utilities/vmError.hpp"
65#include "utilities/xmlstream.hpp"
66
67// Implementation of Method
68
69Method* Method::allocate(ClassLoaderData* loader_data,
70                         int byte_code_size,
71                         AccessFlags access_flags,
72                         InlineTableSizes* sizes,
73                         ConstMethod::MethodType method_type,
74                         TRAPS) {
75  assert(!access_flags.is_native() || byte_code_size == 0,
76         "native methods should not contain byte codes");
77  ConstMethod* cm = ConstMethod::allocate(loader_data,
78                                          byte_code_size,
79                                          sizes,
80                                          method_type,
81                                          CHECK_NULL);
82  int size = Method::size(access_flags.is_native());
83  return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags);
84}
85
86Method::Method(ConstMethod* xconst, AccessFlags access_flags) {
87  NoSafepointVerifier no_safepoint;
88  set_constMethod(xconst);
89  set_access_flags(access_flags);
90  set_intrinsic_id(vmIntrinsics::_none);
91  set_force_inline(false);
92  set_hidden(false);
93  set_dont_inline(false);
94  set_has_injected_profile(false);
95  set_method_data(NULL);
96  clear_method_counters();
97  set_vtable_index(Method::garbage_vtable_index);
98
99  // Fix and bury in Method*
100  set_interpreter_entry(NULL); // sets i2i entry and from_int
101  set_adapter_entry(NULL);
102  clear_code(false /* don't need a lock */); // from_c/from_i get set to c2i/i2i
103
104  if (access_flags.is_native()) {
105    clear_native_function();
106    set_signature_handler(NULL);
107  }
108
109  NOT_PRODUCT(set_compiled_invocation_count(0);)
110}
111
112// Release Method*.  The nmethod will be gone when we get here because
113// we've walked the code cache.
114void Method::deallocate_contents(ClassLoaderData* loader_data) {
115  MetadataFactory::free_metadata(loader_data, constMethod());
116  set_constMethod(NULL);
117  MetadataFactory::free_metadata(loader_data, method_data());
118  set_method_data(NULL);
119  MetadataFactory::free_metadata(loader_data, method_counters());
120  clear_method_counters();
121  // The nmethod will be gone when we get here.
122  if (code() != NULL) _code = NULL;
123}
124
125address Method::get_i2c_entry() {
126  assert(adapter() != NULL, "must have");
127  return adapter()->get_i2c_entry();
128}
129
130address Method::get_c2i_entry() {
131  assert(adapter() != NULL, "must have");
132  return adapter()->get_c2i_entry();
133}
134
135address Method::get_c2i_unverified_entry() {
136  assert(adapter() != NULL, "must have");
137  return adapter()->get_c2i_unverified_entry();
138}
139
140char* Method::name_and_sig_as_C_string() const {
141  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
142}
143
144char* Method::name_and_sig_as_C_string(char* buf, int size) const {
145  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
146}
147
148char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
149  const char* klass_name = klass->external_name();
150  int klass_name_len  = (int)strlen(klass_name);
151  int method_name_len = method_name->utf8_length();
152  int len             = klass_name_len + 1 + method_name_len + signature->utf8_length();
153  char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
154  strcpy(dest, klass_name);
155  dest[klass_name_len] = '.';
156  strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
157  strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
158  dest[len] = 0;
159  return dest;
160}
161
162char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
163  Symbol* klass_name = klass->name();
164  klass_name->as_klass_external_name(buf, size);
165  int len = (int)strlen(buf);
166
167  if (len < size - 1) {
168    buf[len++] = '.';
169
170    method_name->as_C_string(&(buf[len]), size - len);
171    len = (int)strlen(buf);
172
173    signature->as_C_string(&(buf[len]), size - len);
174  }
175
176  return buf;
177}
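
// For illustration only: a hypothetical call such as
//
//   Method* m = ...;                                  // e.g. String.indexOf(String)
//   const char* s = m->name_and_sig_as_C_string();
//
// produces a string of the form
//
//   "java.lang.String.indexOf(Ljava/lang/String;)I"
//
// i.e. the external class name, a '.', the method name, and the raw method
// descriptor, with no separators beyond the single '.'.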
178
179int Method::fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS) {
180  // exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index)
181  // access exception table
182  ExceptionTable table(mh());
183  int length = table.length();
184  // iterate through all entries sequentially
185  constantPoolHandle pool(THREAD, mh->constants());
186  for (int i = 0; i < length; i ++) {
187    // Reacquire the table in case a GC happened

188    ExceptionTable table(mh());
189    int beg_bci = table.start_pc(i);
190    int end_bci = table.end_pc(i);
191    assert(beg_bci <= end_bci, "inconsistent exception table");
192    if (beg_bci <= throw_bci && throw_bci < end_bci) {
193      // exception handler bci range covers throw_bci => investigate further
194      int handler_bci = table.handler_pc(i);
195      int klass_index = table.catch_type_index(i);
196      if (klass_index == 0) {
197        return handler_bci;
198      } else if (ex_klass == NULL) {
199        return handler_bci;
200      } else {
201        // we know the exception class => get the constraint class
202        // this may require loading of the constraint class; if verification
203        // fails or some other exception occurs, return handler_bci
204        Klass* k = pool->klass_at(klass_index, CHECK_(handler_bci));
205        assert(k != NULL, "klass not loaded");
206        if (ex_klass->is_subtype_of(k)) {
207          return handler_bci;
208        }
209      }
210    }
211  }
212
213  return -1;
214}
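
// A hedged sketch of a typical caller: given the bci at which an exception was
// thrown and the (possibly NULL) exception klass, look up a handler bci and fall
// back to unwinding when -1 comes back. The names mh, ex_klass and throw_bci are
// placeholders for values the real callers obtain elsewhere.
//
//   int handler_bci = Method::fast_exception_handler_bci_for(mh, ex_klass, throw_bci, THREAD);
//   if (HAS_PENDING_EXCEPTION) {
//     // resolving the catch type failed; handler_bci came back via CHECK_(handler_bci)
//   } else if (handler_bci == -1) {
//     // no handler in this method; continue unwinding into the caller's frame
//   } else {
//     // dispatch to the bytecode at handler_bci
//   }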
215
216void Method::mask_for(int bci, InterpreterOopMap* mask) {
217
218  Thread* myThread    = Thread::current();
219  methodHandle h_this(myThread, this);
220#if defined(ASSERT) && !INCLUDE_JVMCI
221  bool has_capability = myThread->is_VM_thread() ||
222                        myThread->is_ConcurrentGC_thread() ||
223                        myThread->is_GC_task_thread();
224
225  if (!has_capability) {
226    if (!VerifyStack && !VerifyLastFrame) {
227      // verify stack calls this outside VM thread
228      warning("oopmap should only be accessed by the "
229              "VM, GC task or CMS threads (or during debugging)");
230      InterpreterOopMap local_mask;
231      method_holder()->mask_for(h_this, bci, &local_mask);
232      local_mask.print();
233    }
234  }
235#endif
236  method_holder()->mask_for(h_this, bci, mask);
237  return;
238}
239
240
241int Method::bci_from(address bcp) const {
242  if (is_native() && bcp == 0) {
243    return 0;
244  }
245#ifdef ASSERT
246  {
247    ResourceMark rm;
248    assert(is_native() && bcp == code_base() || contains(bcp) || VMError::is_error_reported(),
249           "bcp doesn't belong to this method: bcp: " INTPTR_FORMAT ", method: %s",
250           p2i(bcp), name_and_sig_as_C_string());
251  }
252#endif
253  return bcp - code_base();
254}
255
256
257int Method::validate_bci(int bci) const {
258  return (bci == 0 || bci < code_size()) ? bci : -1;
259}
260
261// Return bci if it appears to be a valid bcp
262// Return -1 otherwise.
263// Used by profiling code, when invalid data is a possibility.
264// The caller is responsible for validating the Method* itself.
265int Method::validate_bci_from_bcp(address bcp) const {
266  // keep bci as -1 if not a valid bci
267  int bci = -1;
268  if (bcp == 0 || bcp == code_base()) {
269    // code_size() may return 0 and we allow 0 here
270    // the method may be native
271    bci = 0;
272  } else if (contains(bcp)) {
273    bci = bcp - code_base();
274  }
275  // Assert that if we have dodged any asserts, bci is negative.
276  assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0");
277  return bci;
278}
279
280address Method::bcp_from(int bci) const {
281  assert((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size()),
282         "illegal bci: %d for %s method", bci, is_native() ? "native" : "non-native");
283  address bcp = code_base() + bci;
284  assert(is_native() && bcp == code_base() || contains(bcp), "bcp doesn't belong to this method");
285  return bcp;
286}
287
288address Method::bcp_from(address bcp) const {
289  if (is_native() && bcp == NULL) {
290    return code_base();
291  } else {
292    return bcp;
293  }
294}
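
// How the two mappings relate, for illustration: for any valid bci of a
// non-native method,
//
//   bci_from(bcp_from(bci)) == bci
//
// because bcp_from(int) returns code_base() + bci and bci_from(address)
// computes bcp - code_base(). Native methods are special-cased: their only
// bci is 0 and their only bcp is code_base() (a NULL bcp is normalized to
// code_base() by bcp_from(address) above).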
295
296int Method::size(bool is_native) {
297  // If native, then include pointers for native_function and signature_handler
298  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
299  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
300  return align_metadata_size(header_size() + extra_words);
301}
302
303
304Symbol* Method::klass_name() const {
305  return method_holder()->name();
306}
307
308
309void Method::metaspace_pointers_do(MetaspaceClosure* it) {
310  log_trace(cds)("Iter(Method): %p", this);
311
312  it->push(&_constMethod);
313  it->push(&_method_data);
314  it->push(&_method_counters);
315}
316
317// Attempt to return method oop to original state.  Clear any pointers
318// (to objects outside the shared spaces).  We won't be able to predict
319// where they should point in a new JVM.  Further initialize some
320// entries now in order to allow them to be write protected later.
321
322void Method::remove_unshareable_info() {
323  unlink_method();
324}
325
326void Method::set_vtable_index(int index) {
327  if (is_shared() && !MetaspaceShared::remapped_readwrite()) {
328    // At runtime initialize_vtable is rerun as part of link_class_impl()
329    // for a shared class loaded by the non-boot loader to obtain the loader
330    // constraints based on the runtime classloaders' context.
331    return; // don't write into the shared class
332  } else {
333    _vtable_index = index;
334  }
335}
336
337void Method::set_itable_index(int index) {
338  if (is_shared() && !MetaspaceShared::remapped_readwrite()) {
339    // At runtime initialize_itable is rerun as part of link_class_impl()
340    // for a shared class loaded by the non-boot loader to obtain the loader
341    // constraints based on the runtime classloaders' context. The dumptime
342    // itable index should be the same as the runtime index.
343    assert(_vtable_index == itable_index_max - index,
344           "archived itable index is different from runtime index");
345    return; // don't write into the shared class
346  } else {
347    _vtable_index = itable_index_max - index;
348  }
349  assert(valid_itable_index(), "");
350}
351
352
353
354bool Method::was_executed_more_than(int n) {
355  // Invocation counter is reset when the Method* is compiled.
356  // If the method has compiled code we therefore assume it has
357  // been executed more than n times.
358  if (is_accessor() || is_empty_method() || (code() != NULL)) {
359    // interpreter doesn't bump invocation counter of trivial methods
360    // compiler does not bump invocation counter of compiled methods
361    return true;
362  }
363  else if ((method_counters() != NULL &&
364            method_counters()->invocation_counter()->carry()) ||
365           (method_data() != NULL &&
366            method_data()->invocation_counter()->carry())) {
367    // The carry bit is set when the counter overflows and causes
368    // a compilation to occur.  We don't know how many times
369    // the counter has been reset, so we simply assume it has
370    // been executed more than n times.
371    return true;
372  } else {
373    return invocation_count() > n;
374  }
375}
376
377void Method::print_invocation_count() {
378  if (is_static()) tty->print("static ");
379  if (is_final()) tty->print("final ");
380  if (is_synchronized()) tty->print("synchronized ");
381  if (is_native()) tty->print("native ");
382  tty->print("%s::", method_holder()->external_name());
383  name()->print_symbol_on(tty);
384  signature()->print_symbol_on(tty);
385
386  if (WizardMode) {
387    // dump the size of the byte codes
388    tty->print(" {%d}", code_size());
389  }
390  tty->cr();
391
392  tty->print_cr ("  interpreter_invocation_count: %8d ", interpreter_invocation_count());
393  tty->print_cr ("  invocation_counter:           %8d ", invocation_count());
394  tty->print_cr ("  backedge_counter:             %8d ", backedge_count());
395#ifndef PRODUCT
396  if (CountCompiledCalls) {
397    tty->print_cr ("  compiled_invocation_count: %8d ", compiled_invocation_count());
398  }
399#endif
400}
401
402// Build a MethodData* object to hold information about this method
403// collected in the interpreter.
404void Method::build_interpreter_method_data(const methodHandle& method, TRAPS) {
405  // Do not profile the method if metaspace has hit an OOM previously
406  // allocating profiling data. Callers clear pending exception so don't
407  // add one here.
408  if (ClassLoaderDataGraph::has_metaspace_oom()) {
409    return;
410  }
411
412  // Grab a lock here to prevent multiple
413  // MethodData*s from being created.
414  MutexLocker ml(MethodData_lock, THREAD);
415  if (method->method_data() == NULL) {
416    ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
417    MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
418    if (HAS_PENDING_EXCEPTION) {
419      CompileBroker::log_metaspace_failure();
420      ClassLoaderDataGraph::set_metaspace_oom(true);
421      return;   // return the exception (which is cleared)
422    }
423
424    method->set_method_data(method_data);
425    if (PrintMethodData && (Verbose || WizardMode)) {
426      ResourceMark rm(THREAD);
427      tty->print("build_interpreter_method_data for ");
428      method->print_name(tty);
429      tty->cr();
430      // At the end of the run, the MDO, full of data, will be dumped.
431    }
432  }
433}
434
435MethodCounters* Method::build_method_counters(Method* m, TRAPS) {
436  // Do not profile the method if metaspace has hit an OOM previously
437  if (ClassLoaderDataGraph::has_metaspace_oom()) {
438    return NULL;
439  }
440
441  methodHandle mh(m);
442  MethodCounters* counters = MethodCounters::allocate(mh, THREAD);
443  if (HAS_PENDING_EXCEPTION) {
444    CompileBroker::log_metaspace_failure();
445    ClassLoaderDataGraph::set_metaspace_oom(true);
446    return NULL;   // return the exception (which is cleared)
447  }
448  if (!mh->init_method_counters(counters)) {
449    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
450  }
451
452  if (LogTouchedMethods) {
453    mh->log_touched(CHECK_NULL);
454  }
455
456  return mh->method_counters();
457}
458
459void Method::cleanup_inline_caches() {
460  // The current system doesn't use inline caches in the interpreter
461  // => nothing to do (keep this method around for future use)
462}
463
464
465int Method::extra_stack_words() {
466  // not an inline function, to avoid a header dependency on Interpreter
467  return extra_stack_entries() * Interpreter::stackElementSize;
468}
469
470
471void Method::compute_size_of_parameters(Thread *thread) {
472  ArgumentSizeComputer asc(signature());
473  set_size_of_parameters(asc.size() + (is_static() ? 0 : 1));
474}
475
476BasicType Method::result_type() const {
477  ResultTypeFinder rtf(signature());
478  return rtf.type();
479}
480
481
482bool Method::is_empty_method() const {
483  return  code_size() == 1
484      && *code_base() == Bytecodes::_return;
485}
486
487
488bool Method::is_vanilla_constructor() const {
489  // Returns true if this method is a vanilla constructor, i.e. an "<init>" "()V" method
490  // which only calls the superclass vanilla constructor and possibly does stores of
491  // zero constants to local fields:
492  //
493  //   aload_0
494  //   invokespecial
495  //   indexbyte1
496  //   indexbyte2
497  //
498  // followed by an (optional) sequence of:
499  //
500  //   aload_0
501  //   aconst_null / iconst_0 / fconst_0 / dconst_0
502  //   putfield
503  //   indexbyte1
504  //   indexbyte2
505  //
506  // followed by:
507  //
508  //   return
509
510  assert(name() == vmSymbols::object_initializer_name(),    "Should only be called for default constructors");
511  assert(signature() == vmSymbols::void_method_signature(), "Should only be called for default constructors");
512  int size = code_size();
513  // Check if the size matches
514  if (size == 0 || size % 5 != 0) return false;
515  address cb = code_base();
516  int last = size - 1;
517  if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
518    // Does not call superclass default constructor
519    return false;
520  }
521  // Check optional sequence
522  for (int i = 4; i < last; i += 5) {
523    if (cb[i] != Bytecodes::_aload_0) return false;
524    if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;
525    if (cb[i+2] != Bytecodes::_putfield) return false;
526  }
527  return true;
528}
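
// For illustration, a constructor whose body only stores a zero constant, e.g.
//
//   class Point { int x; Point() { x = 0; } }
//
// is typically compiled to the 10-byte sequence below (10 % 5 == 0), which the
// checks above accept as a vanilla constructor:
//
//   0: aload_0
//   1: invokespecial #1      // Object.<init>()V
//   4: aload_0
//   5: iconst_0
//   6: putfield      #2      // field x
//   9: return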
529
530
531bool Method::compute_has_loops_flag() {
532  BytecodeStream bcs(this);
533  Bytecodes::Code bc;
534
535  while ((bc = bcs.next()) >= 0) {
536    switch( bc ) {
537      case Bytecodes::_ifeq:
538      case Bytecodes::_ifnull:
539      case Bytecodes::_iflt:
540      case Bytecodes::_ifle:
541      case Bytecodes::_ifne:
542      case Bytecodes::_ifnonnull:
543      case Bytecodes::_ifgt:
544      case Bytecodes::_ifge:
545      case Bytecodes::_if_icmpeq:
546      case Bytecodes::_if_icmpne:
547      case Bytecodes::_if_icmplt:
548      case Bytecodes::_if_icmpgt:
549      case Bytecodes::_if_icmple:
550      case Bytecodes::_if_icmpge:
551      case Bytecodes::_if_acmpeq:
552      case Bytecodes::_if_acmpne:
553      case Bytecodes::_goto:
554      case Bytecodes::_jsr:
555        if( bcs.dest() < bcs.next_bci() ) _access_flags.set_has_loops();
556        break;
557
558      case Bytecodes::_goto_w:
559      case Bytecodes::_jsr_w:
560        if( bcs.dest_w() < bcs.next_bci() ) _access_flags.set_has_loops();
561        break;
562
563      default:
564        break;
565    }
566  }
567  _access_flags.set_loops_flag_init();
568  return _access_flags.has_loops();
569}
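
// For illustration: only backward branches count as loops. A simple countdown
//
//   while (n > 0) { n--; }
//
// typically compiles to a conditional or unconditional branch whose destination
// is a smaller bci than the following instruction, so bcs.dest() < bcs.next_bci()
// holds and has_loops is set. Purely forward branches (plain if/else) never set it.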
570
571bool Method::is_final_method(AccessFlags class_access_flags) const {
572  // or "does_not_require_vtable_entry"
573  // a default method or an overpass can occur here; neither is final (each reuses a vtable entry)
574  // private methods in classes get vtable entries for backward class compatibility.
575  if (is_overpass() || is_default_method())  return false;
576  return is_final() || class_access_flags.is_final();
577}
578
579bool Method::is_final_method() const {
580  return is_final_method(method_holder()->access_flags());
581}
582
583bool Method::is_default_method() const {
584  if (method_holder() != NULL &&
585      method_holder()->is_interface() &&
586      !is_abstract() && !is_private()) {
587    return true;
588  } else {
589    return false;
590  }
591}
592
593bool Method::can_be_statically_bound(AccessFlags class_access_flags) const {
594  if (is_final_method(class_access_flags))  return true;
595#ifdef ASSERT
596  ResourceMark rm;
597  bool is_nonv = (vtable_index() == nonvirtual_vtable_index);
598  if (class_access_flags.is_interface()) {
599      assert(is_nonv == is_static() || is_nonv == is_private(),
600             "nonvirtual unexpected for non-static, non-private: %s",
601             name_and_sig_as_C_string());
602  }
603#endif
604  assert(valid_vtable_index() || valid_itable_index(), "method must be linked before we ask this question");
605  return vtable_index() == nonvirtual_vtable_index;
606}
607
608bool Method::can_be_statically_bound() const {
609  return can_be_statically_bound(method_holder()->access_flags());
610}
611
612bool Method::is_accessor() const {
613  return is_getter() || is_setter();
614}
615
616bool Method::is_getter() const {
617  if (code_size() != 5) return false;
618  if (size_of_parameters() != 1) return false;
619  if (java_code_at(0) != Bytecodes::_aload_0)  return false;
620  if (java_code_at(1) != Bytecodes::_getfield) return false;
621  switch (java_code_at(4)) {
622    case Bytecodes::_ireturn:
623    case Bytecodes::_lreturn:
624    case Bytecodes::_freturn:
625    case Bytecodes::_dreturn:
626    case Bytecodes::_areturn:
627      break;
628    default:
629      return false;
630  }
631  return true;
632}
633
634bool Method::is_setter() const {
635  if (code_size() != 6) return false;
636  if (java_code_at(0) != Bytecodes::_aload_0) return false;
637  switch (java_code_at(1)) {
638    case Bytecodes::_iload_1:
639    case Bytecodes::_aload_1:
640    case Bytecodes::_fload_1:
641      if (size_of_parameters() != 2) return false;
642      break;
643    case Bytecodes::_dload_1:
644    case Bytecodes::_lload_1:
645      if (size_of_parameters() != 3) return false;
646      break;
647    default:
648      return false;
649  }
650  if (java_code_at(2) != Bytecodes::_putfield) return false;
651  if (java_code_at(5) != Bytecodes::_return)   return false;
652  return true;
653}
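
// For illustration, the bytecode shapes the two predicates above look for:
//
//   int getX()           // is_getter(): code_size() == 5, one parameter slot (this)
//     0: aload_0
//     1: getfield #x
//     4: ireturn
//
//   void setX(int v)     // is_setter(): code_size() == 6, two parameter slots (this, v)
//     0: aload_0
//     1: iload_1
//     2: putfield #x
//     5: return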
654
655bool Method::is_constant_getter() const {
656  int last_index = code_size() - 1;
657  // Check if the first 1-3 bytecodes are a constant push
658  // and the last bytecode is a return.
659  return (2 <= code_size() && code_size() <= 4 &&
660          Bytecodes::is_const(java_code_at(0)) &&
661          Bytecodes::length_for(java_code_at(0)) == last_index &&
662          Bytecodes::is_return(java_code_at(last_index)));
663}
664
665bool Method::is_initializer() const {
666  return is_object_initializer() || is_static_initializer();
667}
668
669bool Method::has_valid_initializer_flags() const {
670  return (is_static() ||
671          method_holder()->major_version() < 51);
672}
673
674bool Method::is_static_initializer() const {
675  // For classfiles version 51 or greater, ensure that the clinit method is
676  // static.  Non-static methods with the name "<clinit>" are not static
677  // initializers. (older classfiles exempted for backward compatibility)
678  return name() == vmSymbols::class_initializer_name() &&
679         has_valid_initializer_flags();
680}
681
682bool Method::is_object_initializer() const {
683   return name() == vmSymbols::object_initializer_name();
684}
685
686objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
687  int length = method->checked_exceptions_length();
688  if (length == 0) {  // common case
689    return objArrayHandle(THREAD, Universe::the_empty_class_klass_array());
690  } else {
691    methodHandle h_this(THREAD, method);
692    objArrayOop m_oop = oopFactory::new_objArray(SystemDictionary::Class_klass(), length, CHECK_(objArrayHandle()));
693    objArrayHandle mirrors (THREAD, m_oop);
694    for (int i = 0; i < length; i++) {
695      CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
696      Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
697      assert(k->is_subclass_of(SystemDictionary::Throwable_klass()), "invalid exception class");
698      mirrors->obj_at_put(i, k->java_mirror());
699    }
700    return mirrors;
701  }
702};
703
704
705int Method::line_number_from_bci(int bci) const {
706  if (bci == SynchronizationEntryBCI) bci = 0;
707  assert(bci == 0 || 0 <= bci && bci < code_size(), "illegal bci");
708  int best_bci  =  0;
709  int best_line = -1;
710
711  if (has_linenumber_table()) {
712    // The line numbers are a short array of 2-tuples [start_pc, line_number].
713    // Not necessarily sorted and not necessarily one-to-one.
714    CompressedLineNumberReadStream stream(compressed_linenumber_table());
715    while (stream.read_pair()) {
716      if (stream.bci() == bci) {
717        // perfect match
718        return stream.line();
719      } else {
720        // update best_bci/line
721        if (stream.bci() < bci && stream.bci() >= best_bci) {
722          best_bci  = stream.bci();
723          best_line = stream.line();
724        }
725      }
726    }
727  }
728  return best_line;
729}
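
// For illustration: with a line number table of (start_pc, line) pairs such as
// (0, 10), (5, 11), (12, 12), a query for bci == 8 returns 11, since the entry
// with the largest start_pc not exceeding 8 is (5, 11); a query for bci == 12
// matches exactly and returns 12. If no entry qualifies, -1 is returned.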
730
731
732bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
733  if( constants()->tag_at(klass_index).is_unresolved_klass() ) {
734    Thread *thread = Thread::current();
735    Symbol* klass_name = constants()->klass_name_at(klass_index);
736    Handle loader(thread, method_holder()->class_loader());
737    Handle prot  (thread, method_holder()->protection_domain());
738    return SystemDictionary::find(klass_name, loader, prot, thread) != NULL;
739  } else {
740    return true;
741  }
742}
743
744
745bool Method::is_klass_loaded(int refinfo_index, bool must_be_resolved) const {
746  int klass_index = constants()->klass_ref_index_at(refinfo_index);
747  if (must_be_resolved) {
748    // Make sure klass is resolved in constantpool.
749    if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
750  }
751  return is_klass_loaded_by_klass_index(klass_index);
752}
753
754
755void Method::set_native_function(address function, bool post_event_flag) {
756  assert(function != NULL, "use clear_native_function to unregister natives");
757  assert(!is_method_handle_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
758  address* native_function = native_function_addr();
759
760  // We can see racers trying to place the same native function into place. Once
761  // is plenty.
762  address current = *native_function;
763  if (current == function) return;
764  if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
765      function != NULL) {
766    // native_method_throw_unsatisfied_link_error_entry() should only
767    // be passed when post_event_flag is false.
768    assert(function !=
769      SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
770      "post_event_flag mis-match");
771
772    // post the bind event, and possibly change the bind function
773    JvmtiExport::post_native_method_bind(this, &function);
774  }
775  *native_function = function;
776  // This function can be called more than once. We must make sure that we always
777  // use the latest registered method -> check if a stub already has been generated.
778  // If so, we have to make it not_entrant.
779  CompiledMethod* nm = code(); // Put it into local variable to guard against concurrent updates
780  if (nm != NULL) {
781    nm->make_not_entrant();
782  }
783}
784
785
786bool Method::has_native_function() const {
787  if (is_method_handle_intrinsic())
788    return false;  // special-cased in SharedRuntime::generate_native_wrapper
789  address func = native_function();
790  return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
791}
792
793
794void Method::clear_native_function() {
795  // Note: is_method_handle_intrinsic() is allowed here.
796  set_native_function(
797    SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
798    !native_bind_event_is_interesting);
799  clear_code();
800}
801
802address Method::critical_native_function() {
803  methodHandle mh(this);
804  return NativeLookup::lookup_critical_entry(mh);
805}
806
807
808void Method::set_signature_handler(address handler) {
809  address* signature_handler =  signature_handler_addr();
810  *signature_handler = handler;
811}
812
813
814void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {
815  if (PrintCompilation && report) {
816    ttyLocker ttyl;
817    tty->print("made not %scompilable on ", is_osr ? "OSR " : "");
818    if (comp_level == CompLevel_all) {
819      tty->print("all levels ");
820    } else {
821      tty->print("levels ");
822      for (int i = (int)CompLevel_none; i <= comp_level; i++) {
823        tty->print("%d ", i);
824      }
825    }
826    this->print_short_name(tty);
827    int size = this->code_size();
828    if (size > 0) {
829      tty->print(" (%d bytes)", size);
830    }
831    if (reason != NULL) {
832      tty->print("   %s", reason);
833    }
834    tty->cr();
835  }
836  if ((TraceDeoptimization || LogCompilation) && (xtty != NULL)) {
837    ttyLocker ttyl;
838    xtty->begin_elem("make_not_compilable thread='" UINTX_FORMAT "' osr='%d' level='%d'",
839                     os::current_thread_id(), is_osr, comp_level);
840    if (reason != NULL) {
841      xtty->print(" reason=\'%s\'", reason);
842    }
843    xtty->method(this);
844    xtty->stamp();
845    xtty->end_elem();
846  }
847}
848
849bool Method::is_always_compilable() const {
850  // Generated adapters must be compiled
851  if (is_method_handle_intrinsic() && is_synthetic()) {
852    assert(!is_not_c1_compilable(), "sanity check");
853    assert(!is_not_c2_compilable(), "sanity check");
854    return true;
855  }
856
857  return false;
858}
859
860bool Method::is_not_compilable(int comp_level) const {
861  if (number_of_breakpoints() > 0)
862    return true;
863  if (is_always_compilable())
864    return false;
865  if (comp_level == CompLevel_any)
866    return is_not_c1_compilable() || is_not_c2_compilable();
867  if (is_c1_compile(comp_level))
868    return is_not_c1_compilable();
869  if (is_c2_compile(comp_level))
870    return is_not_c2_compilable();
871  return false;
872}
873
874// call this when compiler finds that this method is not compilable
875void Method::set_not_compilable(int comp_level, bool report, const char* reason) {
876  if (is_always_compilable()) {
877    // Don't mark a method which should be always compilable
878    return;
879  }
880  print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason);
881  if (comp_level == CompLevel_all) {
882    set_not_c1_compilable();
883    set_not_c2_compilable();
884  } else {
885    if (is_c1_compile(comp_level))
886      set_not_c1_compilable();
887    if (is_c2_compile(comp_level))
888      set_not_c2_compilable();
889  }
890  CompilationPolicy::policy()->disable_compilation(this);
891  assert(!CompilationPolicy::can_be_compiled(this, comp_level), "sanity check");
892}
893
894bool Method::is_not_osr_compilable(int comp_level) const {
895  if (is_not_compilable(comp_level))
896    return true;
897  if (comp_level == CompLevel_any)
898    return is_not_c1_osr_compilable() || is_not_c2_osr_compilable();
899  if (is_c1_compile(comp_level))
900    return is_not_c1_osr_compilable();
901  if (is_c2_compile(comp_level))
902    return is_not_c2_osr_compilable();
903  return false;
904}
905
906void Method::set_not_osr_compilable(int comp_level, bool report, const char* reason) {
907  print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
908  if (comp_level == CompLevel_all) {
909    set_not_c1_osr_compilable();
910    set_not_c2_osr_compilable();
911  } else {
912    if (is_c1_compile(comp_level))
913      set_not_c1_osr_compilable();
914    if (is_c2_compile(comp_level))
915      set_not_c2_osr_compilable();
916  }
917  CompilationPolicy::policy()->disable_compilation(this);
918  assert(!CompilationPolicy::can_be_osr_compiled(this, comp_level), "sanity check");
919}
920
921// Revert to using the interpreter and clear out the nmethod
922void Method::clear_code(bool acquire_lock /* = true */) {
923  MutexLockerEx pl(acquire_lock ? Patching_lock : NULL, Mutex::_no_safepoint_check_flag);
924  // this may be NULL if c2i adapters have not been made yet
925  // This should only happen at allocation time.
926  if (adapter() == NULL) {
927    _from_compiled_entry    = NULL;
928  } else {
929    _from_compiled_entry    = adapter()->get_c2i_entry();
930  }
931  OrderAccess::storestore();
932  _from_interpreted_entry = _i2i_entry;
933  OrderAccess::storestore();
934  _code = NULL;
935}
936
937#if INCLUDE_CDS
938// Called by class data sharing to remove any entry points (which are not shared)
939void Method::unlink_method() {
940  _code = NULL;
941
942  assert(DumpSharedSpaces, "dump time only");
943  // Set the values to what they should be at run time. Note that
944  // this Method can no longer be executed during dump time.
945  _i2i_entry = Interpreter::entry_for_cds_method(this);
946  _from_interpreted_entry = _i2i_entry;
947
948  if (is_native()) {
949    *native_function_addr() = NULL;
950    set_signature_handler(NULL);
951  }
952  NOT_PRODUCT(set_compiled_invocation_count(0);)
953
954  CDSAdapterHandlerEntry* cds_adapter = (CDSAdapterHandlerEntry*)adapter();
955  constMethod()->set_adapter_trampoline(cds_adapter->get_adapter_trampoline());
956  _from_compiled_entry = cds_adapter->get_c2i_entry_trampoline();
957  assert(*((int*)_from_compiled_entry) == 0, "must be NULL during dump time, to be initialized at run time");
958
959
960  // In case of DumpSharedSpaces, _method_data should always be NULL.
961  assert(_method_data == NULL, "unexpected method data?");
962
963  set_method_data(NULL);
964  clear_method_counters();
965}
966#endif
967
968/****************************************************************************
969// The following illustrates how the entries work for CDS shared Methods:
970//
971// Our goal is to delay writing into a shared Method until it's compiled.
972// Hence, we want to determine the initial values for _i2i_entry,
973// _from_interpreted_entry and _from_compiled_entry during CDS dump time.
974//
975// In this example, both Methods A and B have the _i2i_entry of "zero_locals".
976// They also have similar signatures so that they will share the same
977// AdapterHandlerEntry.
978//
979// _adapter_trampoline points to a fixed location in the RW section of
980// the CDS archive. This location initially contains a NULL pointer. When the
981// first of method A or B is linked, an AdapterHandlerEntry is allocated
982// dynamically, and its c2i/i2c entries are generated.
983//
984// _i2i_entry and _from_interpreted_entry initially point to the same
985// (fixed) location in the CODE section of the CDS archive. This contains
986// an unconditional branch to the actual entry for "zero_locals", which is
987// generated at run time and may be on an arbitrary address. Thus, the
988// unconditional branch is also generated at run time to jump to the correct
989// address.
990//
991// Similarly, _from_compiled_entry points to a fixed address in the CODE
992// section. This address has enough space for an unconditional branch
993// instruction, and is initially zero-filled. After the AdapterHandlerEntry is
994// initialized, and the address for the actual c2i_entry is known, we emit a
995// branch instruction here to branch to the actual c2i_entry.
996//
997// The effect of the extra branch on the i2i and c2i entries is negligible.
998//
999// The reason for putting _adapter_trampoline in RO is many shared Methods
1000// share the same AdapterHandlerEntry, so we can save space in the RW section
1001// by having the extra indirection.
1002
1003
1004[Method A: RW]
1005  _constMethod ----> [ConstMethod: RO]
1006                       _adapter_trampoline -----------+
1007                                                      |
1008  _i2i_entry              (same value as method B)    |
1009  _from_interpreted_entry (same value as method B)    |
1010  _from_compiled_entry    (same value as method B)    |
1011                                                      |
1012                                                      |
1013[Method B: RW]                               +--------+
1014  _constMethod ----> [ConstMethod: RO]       |
1015                       _adapter_trampoline --+--->(AdapterHandlerEntry* ptr: RW)-+
1016                                                                                 |
1017                                                 +-------------------------------+
1018                                                 |
1019                                                 +----> [AdapterHandlerEntry] (allocated at run time)
1020                                                              _fingerprint
1021                                                              _c2i_entry ---------------------------------+->[c2i entry..]
1022 _i2i_entry  -------------+                                   _i2c_entry ---------------+-> [i2c entry..] |
1023 _from_interpreted_entry  |                                   _c2i_unverified_entry     |                 |
1024         |                |                                                             |                 |
1025         |                |  (_cds_entry_table: CODE)                                   |                 |
1026         |                +->[0]: jmp _entry_table[0] --> (i2i_entry_for "zero_locals") |                 |
1027         |                |                               (allocated at run time)       |                 |
1028         |                |  ...                           [asm code ...]               |                 |
1029         +-[not compiled]-+  [n]: jmp _entry_table[n]                                   |                 |
1030         |                                                                              |                 |
1031         |                                                                              |                 |
1032         +-[compiled]-------------------------------------------------------------------+                 |
1033                                                                                                          |
1034 _from_compiled_entry------------>  (_c2i_entry_trampoline: CODE)                                         |
1035                                    [jmp c2i_entry] ------------------------------------------------------+
1036
1037***/
1038
1039// Called when the method_holder is getting linked. Setup entrypoints so the method
1040// is ready to be called from interpreter, compiler, and vtables.
1041void Method::link_method(const methodHandle& h_method, TRAPS) {
1042  // If the code cache is full, we may reenter this function for the
1043  // leftover methods that weren't linked.
1044  if (is_shared()) {
1045    address entry = Interpreter::entry_for_cds_method(h_method);
1046    assert(entry != NULL && entry == _i2i_entry,
1047           "should be correctly set during dump time");
1048    if (adapter() != NULL) {
1049      return;
1050    }
1051    assert(entry == _from_interpreted_entry,
1052           "should be correctly set during dump time");
1053  } else if (_i2i_entry != NULL) {
1054    return;
1055  }
1056  assert( _code == NULL, "nothing compiled yet" );
1057
1058  // Setup interpreter entrypoint
1059  assert(this == h_method(), "wrong h_method()" );
1060
1061  if (!is_shared()) {
1062    assert(adapter() == NULL, "init'd to NULL");
1063    address entry = Interpreter::entry_for_method(h_method);
1064    assert(entry != NULL, "interpreter entry must be non-null");
1065    // Sets both _i2i_entry and _from_interpreted_entry
1066    set_interpreter_entry(entry);
1067  }
1068
1069  // Don't overwrite already registered native entries.
1070  if (is_native() && !has_native_function()) {
1071    set_native_function(
1072      SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1073      !native_bind_event_is_interesting);
1074  }
1075
1076  // Setup compiler entrypoint.  This is made eagerly, so we do not need
1077  // special handling of vtables.  An alternative is to make adapters more
1078  // lazily by calling make_adapter() from from_compiled_entry() for the
1079  // normal calls.  For vtable calls life gets more complicated.  When a
1080  // call-site goes mega-morphic we need adapters in all methods which can be
1081  // called from the vtable.  We need adapters on such methods that get loaded
1082  // later.  Ditto for mega-morphic itable calls.  If this proves to be a
1083  // problem we'll make these lazily later.
1084  (void) make_adapters(h_method, CHECK);
1085
1086  // ONLY USE the h_method now as make_adapter may have blocked
1087
1088}
1089
1090address Method::make_adapters(const methodHandle& mh, TRAPS) {
1091  // Adapters for compiled code are made eagerly here.  They are fairly
1092  // small (generally < 100 bytes) and quick to make (and cached and shared)
1093  // so making them eagerly shouldn't be too expensive.
1094  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1095  if (adapter == NULL ) {
1096    if (!is_init_completed()) {
1097      // Don't throw exceptions during VM initialization because java.lang.* classes
1098      // might not have been initialized, causing problems when constructing the
1099      // Java exception object.
1100      vm_exit_during_initialization("Out of space in CodeCache for adapters");
1101    } else {
1102      THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "Out of space in CodeCache for adapters");
1103    }
1104  }
1105
1106  if (mh->is_shared()) {
1107    assert(mh->adapter() == adapter, "must be");
1108    assert(mh->_from_compiled_entry != NULL, "must be");
1109  } else {
1110    mh->set_adapter_entry(adapter);
1111    mh->_from_compiled_entry = adapter->get_c2i_entry();
1112  }
1113  return adapter->get_c2i_entry();
1114}
1115
1116void Method::restore_unshareable_info(TRAPS) {
1117  assert(is_method() && is_valid_method(), "ensure C++ vtable is restored");
1118
1119  // Since restore_unshareable_info can be called more than once for a method, don't
1120  // redo any work.
1121  if (adapter() == NULL) {
1122    methodHandle mh(THREAD, this);
1123    link_method(mh, CHECK);
1124  }
1125}
1126
1127volatile address Method::from_compiled_entry_no_trampoline() const {
1128  nmethod *code = (nmethod *)OrderAccess::load_ptr_acquire(&_code);
1129  if (code) {
1130    return code->verified_entry_point();
1131  } else {
1132    return adapter()->get_c2i_entry();
1133  }
1134}
1135
1136// The verified_code_entry() must be called when an invoke is resolved
1137// on this method.
1138
1139// It returns the compiled code entry point, after asserting not null.
1140// This function is called after potential safepoints, so that the nmethod
1141// or adapter it points to is still live and valid.
1142// This function must not hit a safepoint!
1143address Method::verified_code_entry() {
1144  debug_only(NoSafepointVerifier nsv;)
1145  assert(_from_compiled_entry != NULL, "must be set");
1146  return _from_compiled_entry;
1147}
1148
1149// Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
1150// (could be racing a deopt).
1151// Not inline to avoid circular ref.
1152bool Method::check_code() const {
1153  // cached in a register or local.  There's a race on the value of the field.
1154  CompiledMethod *code = (CompiledMethod *)OrderAccess::load_ptr_acquire(&_code);
1155  return code == NULL || (code->method() == NULL) || (code->method() == (Method*)this && !code->is_osr_method());
1156}
1157
1158// Install compiled code.  Instantly it can execute.
1159void Method::set_code(const methodHandle& mh, CompiledMethod *code) {
1160  MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
1161  assert( code, "use clear_code to remove code" );
1162  assert( mh->check_code(), "" );
1163
1164  guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");
1165
1166  // These writes must happen in this order, because the interpreter will
1167  // directly jump to from_interpreted_entry which jumps to an i2c adapter
1168  // which jumps to _from_compiled_entry.
1169  mh->_code = code;             // Assign before allowing compiled code to exec
1170
1171  int comp_level = code->comp_level();
1172  // In theory there could be a race here. In practice it is unlikely
1173  // and not worth worrying about.
1174  if (comp_level > mh->highest_comp_level()) {
1175    mh->set_highest_comp_level(comp_level);
1176  }
1177
1178  OrderAccess::storestore();
1179#ifdef SHARK
1180  mh->_from_interpreted_entry = code->insts_begin();
1181#else //!SHARK
1182  mh->_from_compiled_entry = code->verified_entry_point();
1183  OrderAccess::storestore();
1184  // Instantly compiled code can execute.
1185  if (!mh->is_method_handle_intrinsic())
1186    mh->_from_interpreted_entry = mh->get_i2c_entry();
1187#endif //!SHARK
1188}
1189
1190
1191bool Method::is_overridden_in(Klass* k) const {
1192  InstanceKlass* ik = InstanceKlass::cast(k);
1193
1194  if (ik->is_interface()) return false;
1195
1196  // If the method is declared in an interface, we skip it - except if it
1197  // is a miranda method
1198  if (method_holder()->is_interface()) {
1199    // Check that method is not a miranda method
1200    if (ik->lookup_method(name(), signature()) == NULL) {
1201      // No implementation exists - so this is a miranda method
1202      return false;
1203    }
1204    return true;
1205  }
1206
1207  assert(ik->is_subclass_of(method_holder()), "should be subklass");
1208  if (!has_vtable_index()) {
1209    return false;
1210  } else {
1211    Method* vt_m = ik->method_at_vtable(vtable_index());
1212    return vt_m != this;
1213  }
1214}
1215
1216
1217// give advice about whether this Method* should be cached or not
1218bool Method::should_not_be_cached() const {
1219  if (is_old()) {
1220    // This method has been redefined. It is either EMCP or obsolete
1221    // and we don't want to cache it because that would pin the method
1222    // down and prevent it from being collectible if and when it
1223    // finishes executing.
1224    return true;
1225  }
1226
1227  // caching this method should be just fine
1228  return false;
1229}
1230
1231
1232/**
1233 *  Returns true if this is one of the specially treated methods for
1234 *  security related stack walks (like Reflection.getCallerClass).
1235 */
1236bool Method::is_ignored_by_security_stack_walk() const {
1237  if (intrinsic_id() == vmIntrinsics::_invoke) {
1238    // This is Method.invoke() -- ignore it
1239    return true;
1240  }
1241  if (method_holder()->is_subclass_of(SystemDictionary::reflect_MethodAccessorImpl_klass())) {
1242    // This is an auxiliary frame -- ignore it
1243    return true;
1244  }
1245  if (is_method_handle_intrinsic() || is_compiled_lambda_form()) {
1246    // This is an internal adapter frame for method handles -- ignore it
1247    return true;
1248  }
1249  return false;
1250}
1251
1252
1253// Constant pool structure for invoke methods:
1254enum {
1255  _imcp_invoke_name = 1,        // utf8: 'invokeExact', etc.
1256  _imcp_invoke_signature,       // utf8: (variable Symbol*)
1257  _imcp_limit
1258};
1259
1260// Test if this method is an MH adapter frame generated by Java code.
1261// Cf. java/lang/invoke/InvokerBytecodeGenerator
1262bool Method::is_compiled_lambda_form() const {
1263  return intrinsic_id() == vmIntrinsics::_compiledLambdaForm;
1264}
1265
1266// Test if this method is an internal MH primitive method.
1267bool Method::is_method_handle_intrinsic() const {
1268  vmIntrinsics::ID iid = intrinsic_id();
1269  return (MethodHandles::is_signature_polymorphic(iid) &&
1270          MethodHandles::is_signature_polymorphic_intrinsic(iid));
1271}
1272
1273bool Method::has_member_arg() const {
1274  vmIntrinsics::ID iid = intrinsic_id();
1275  return (MethodHandles::is_signature_polymorphic(iid) &&
1276          MethodHandles::has_member_arg(iid));
1277}
1278
1279// Make an instance of a signature-polymorphic internal MH primitive.
1280methodHandle Method::make_method_handle_intrinsic(vmIntrinsics::ID iid,
1281                                                         Symbol* signature,
1282                                                         TRAPS) {
1283  ResourceMark rm;
1284  methodHandle empty;
1285
1286  InstanceKlass* holder = SystemDictionary::MethodHandle_klass();
1287  Symbol* name = MethodHandles::signature_polymorphic_intrinsic_name(iid);
1288  assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");
1289  if (TraceMethodHandles) {
1290    tty->print_cr("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());
1291  }
1292
1293  // invariant:   cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
1294  name->increment_refcount();
1295  signature->increment_refcount();
1296
1297  int cp_length = _imcp_limit;
1298  ClassLoaderData* loader_data = holder->class_loader_data();
1299  constantPoolHandle cp;
1300  {
1301    ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
1302    cp = constantPoolHandle(THREAD, cp_oop);
1303  }
1304  cp->set_pool_holder(holder);
1305  cp->symbol_at_put(_imcp_invoke_name,       name);
1306  cp->symbol_at_put(_imcp_invoke_signature,  signature);
1307  cp->set_has_preresolution();
1308
1309  // decide on access bits:  public or not?
1310  int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
1311  bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
1312  if (must_be_static)  flags_bits |= JVM_ACC_STATIC;
1313  assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");
1314
1315  methodHandle m;
1316  {
1317    InlineTableSizes sizes;
1318    Method* m_oop = Method::allocate(loader_data, 0,
1319                                     accessFlags_from(flags_bits), &sizes,
1320                                     ConstMethod::NORMAL, CHECK_(empty));
1321    m = methodHandle(THREAD, m_oop);
1322  }
1323  m->set_constants(cp());
1324  m->set_name_index(_imcp_invoke_name);
1325  m->set_signature_index(_imcp_invoke_signature);
1326  assert(MethodHandles::is_signature_polymorphic_name(m->name()), "");
1327  assert(m->signature() == signature, "");
1328  ResultTypeFinder rtf(signature);
1329  m->constMethod()->set_result_type(rtf.type());
1330  m->compute_size_of_parameters(THREAD);
1331  m->init_intrinsic_id();
1332  assert(m->is_method_handle_intrinsic(), "");
1333#ifdef ASSERT
1334  if (!MethodHandles::is_signature_polymorphic(m->intrinsic_id()))  m->print();
1335  assert(MethodHandles::is_signature_polymorphic(m->intrinsic_id()), "must be an invoker");
1336  assert(m->intrinsic_id() == iid, "correctly predicted iid");
1337#endif //ASSERT
1338
1339  // Finally, set up its entry points.
1340  assert(m->can_be_statically_bound(), "");
1341  m->set_vtable_index(Method::nonvirtual_vtable_index);
1342  m->link_method(m, CHECK_(empty));
1343
1344  if (TraceMethodHandles && (Verbose || WizardMode)) {
1345    ttyLocker ttyl;
1346    m->print_on(tty);
1347  }
1348
1349  return m;
1350}
1351
1352Klass* Method::check_non_bcp_klass(Klass* klass) {
1353  if (klass != NULL && klass->class_loader() != NULL) {
1354    if (klass->is_objArray_klass())
1355      klass = ObjArrayKlass::cast(klass)->bottom_klass();
1356    return klass;
1357  }
1358  return NULL;
1359}
1360
1361
1362methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
1363                                                u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
1364  // Code below does not work for native methods - they should never get rewritten anyway
1365  assert(!m->is_native(), "cannot rewrite native methods");
1366  // Allocate new Method*
1367  AccessFlags flags = m->access_flags();
1368
1369  ConstMethod* cm = m->constMethod();
1370  int checked_exceptions_len = cm->checked_exceptions_length();
1371  int localvariable_len = cm->localvariable_table_length();
1372  int exception_table_len = cm->exception_table_length();
1373  int method_parameters_len = cm->method_parameters_length();
1374  int method_annotations_len = cm->method_annotations_length();
1375  int parameter_annotations_len = cm->parameter_annotations_length();
1376  int type_annotations_len = cm->type_annotations_length();
1377  int default_annotations_len = cm->default_annotations_length();
1378
1379  InlineTableSizes sizes(
1380      localvariable_len,
1381      new_compressed_linenumber_size,
1382      exception_table_len,
1383      checked_exceptions_len,
1384      method_parameters_len,
1385      cm->generic_signature_index(),
1386      method_annotations_len,
1387      parameter_annotations_len,
1388      type_annotations_len,
1389      default_annotations_len,
1390      0);
1391
1392  ClassLoaderData* loader_data = m->method_holder()->class_loader_data();
1393  Method* newm_oop = Method::allocate(loader_data,
1394                                      new_code_length,
1395                                      flags,
1396                                      &sizes,
1397                                      m->method_type(),
1398                                      CHECK_(methodHandle()));
1399  methodHandle newm (THREAD, newm_oop);
1400
1401  // Create a shallow copy of Method part, but be careful to preserve the new ConstMethod*
1402  ConstMethod* newcm = newm->constMethod();
1403  int new_const_method_size = newm->constMethod()->size();
1404
1405  // This works because the source and target are both Methods. Some compilers
1406  // (e.g., clang) complain that the target vtable pointer will be stomped,
1407  // so cast away newm()'s and m()'s Methodness.
1408  memcpy((void*)newm(), (void*)m(), sizeof(Method));
1409
1410  // Create shallow copy of ConstMethod.
1411  memcpy(newcm, m->constMethod(), sizeof(ConstMethod));
1412
1413  // Reset correct method/const method, method size, and parameter info
1414  newm->set_constMethod(newcm);
1415  newm->constMethod()->set_code_size(new_code_length);
1416  newm->constMethod()->set_constMethod_size(new_const_method_size);
1417  assert(newm->code_size() == new_code_length, "check");
1418  assert(newm->method_parameters_length() == method_parameters_len, "check");
1419  assert(newm->checked_exceptions_length() == checked_exceptions_len, "check");
1420  assert(newm->exception_table_length() == exception_table_len, "check");
1421  assert(newm->localvariable_table_length() == localvariable_len, "check");
1422  // Copy new byte codes
1423  memcpy(newm->code_base(), new_code, new_code_length);
1424  // Copy line number table
1425  if (new_compressed_linenumber_size > 0) {
1426    memcpy(newm->compressed_linenumber_table(),
1427           new_compressed_linenumber_table,
1428           new_compressed_linenumber_size);
1429  }
1430  // Copy method_parameters
1431  if (method_parameters_len > 0) {
1432    memcpy(newm->method_parameters_start(),
1433           m->method_parameters_start(),
1434           method_parameters_len * sizeof(MethodParametersElement));
1435  }
1436  // Copy checked_exceptions
1437  if (checked_exceptions_len > 0) {
1438    memcpy(newm->checked_exceptions_start(),
1439           m->checked_exceptions_start(),
1440           checked_exceptions_len * sizeof(CheckedExceptionElement));
1441  }
1442  // Copy exception table
1443  if (exception_table_len > 0) {
1444    memcpy(newm->exception_table_start(),
1445           m->exception_table_start(),
1446           exception_table_len * sizeof(ExceptionTableElement));
1447  }
1448  // Copy local variable number table
1449  if (localvariable_len > 0) {
1450    memcpy(newm->localvariable_table_start(),
1451           m->localvariable_table_start(),
1452           localvariable_len * sizeof(LocalVariableTableElement));
1453  }
1454  // Copy stackmap table
1455  if (m->has_stackmap_table()) {
1456    int code_attribute_length = m->stackmap_data()->length();
1457    Array<u1>* stackmap_data =
1458      MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_NULL);
1459    memcpy((void*)stackmap_data->adr_at(0),
1460           (void*)m->stackmap_data()->adr_at(0), code_attribute_length);
1461    newm->set_stackmap_data(stackmap_data);
1462  }
1463
1464  // Copy annotations over to the new method
1465  newcm->copy_annotations_from(loader_data, cm, CHECK_NULL);
1466  return newm;
1467}
1468
1469vmSymbols::SID Method::klass_id_for_intrinsics(const Klass* holder) {
1470  // If the loader is not the default (boot) loader (i.e., != NULL) and not the platform
1471  // class loader, we can't know the intrinsics because we are not loading from the
1472  // core libraries. Exception: the AES intrinsics come from lib/ext/sunjce_provider.jar,
1473  // which is not loaded by the default loader, so we also check for the platform loader here.
1474  const InstanceKlass* ik = InstanceKlass::cast(holder);
1475  if ((ik->class_loader() != NULL) && !SystemDictionary::is_platform_class_loader(ik->class_loader())) {
1476    return vmSymbols::NO_SID;   // regardless of name, no intrinsics here
1477  }
1478
1479  // see if the klass name is well-known:
1480  Symbol* klass_name = ik->name();
1481  return vmSymbols::find_sid(klass_name);
1482}
1483
1484void Method::init_intrinsic_id() {
1485  assert(_intrinsic_id == vmIntrinsics::_none, "do this just once");
1486  const uintptr_t max_id_uint = right_n_bits((int)(sizeof(_intrinsic_id) * BitsPerByte));
1487  assert((uintptr_t)vmIntrinsics::ID_LIMIT <= max_id_uint, "else fix size");
1488  assert(intrinsic_id_size_in_bytes() == sizeof(_intrinsic_id), "");
1489
1490  // the klass name is well-known:
1491  vmSymbols::SID klass_id = klass_id_for_intrinsics(method_holder());
1492  assert(klass_id != vmSymbols::NO_SID, "caller responsibility");
1493
1494  // ditto for method and signature:
1495  vmSymbols::SID  name_id = vmSymbols::find_sid(name());
1496  if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
1497      && klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_VarHandle)
1498      && name_id == vmSymbols::NO_SID) {
1499    return;
1500  }
1501  vmSymbols::SID   sig_id = vmSymbols::find_sid(signature());
1502  if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
1503      && klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_VarHandle)
1504      && sig_id == vmSymbols::NO_SID) {
1505    return;
1506  }
1507  jshort flags = access_flags().as_short();
1508
1509  vmIntrinsics::ID id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
1510  if (id != vmIntrinsics::_none) {
1511    set_intrinsic_id(id);
1512    if (id == vmIntrinsics::_Class_cast) {
1513      // Even if the intrinsic is rejected, we want to inline this simple method.
1514      set_force_inline(true);
1515    }
1516    return;
1517  }
1518
1519  // A few slightly irregular cases:
1520  switch (klass_id) {
1521  case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_StrictMath):
1522    // Second chance: check in regular Math.
1523    switch (name_id) {
1524    case vmSymbols::VM_SYMBOL_ENUM_NAME(min_name):
1525    case vmSymbols::VM_SYMBOL_ENUM_NAME(max_name):
1526    case vmSymbols::VM_SYMBOL_ENUM_NAME(sqrt_name):
1527      // pretend it is the corresponding method in the non-strict class:
1528      klass_id = vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_Math);
1529      id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
1530      break;
1531    default:
1532      break;
1533    }
1534    break;
1535
1536  // Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*, VarHandle.*
1537  case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle):
1538  case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_VarHandle):
1539    if (!is_native())  break;
1540    id = MethodHandles::signature_polymorphic_name_id(method_holder(), name());
1541    if (is_static() != MethodHandles::is_signature_polymorphic_static(id))
1542      id = vmIntrinsics::_none;
1543    break;
1544
1545  default:
1546    break;
1547  }
1548
1549  if (id != vmIntrinsics::_none) {
1550    // Set up its iid.  It is an alias method.
1551    set_intrinsic_id(id);
1552    return;
1553  }
1554}
1555
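// Illustrative sketch of the lookup above (the concrete intrinsic ID is an assumption
// for illustration; the authoritative tables live in vmSymbols.hpp / vmIntrinsics.hpp):
// for a method such as java.lang.Math.sqrt(D)D, klass_id, name_id and sig_id all
// resolve to well-known SIDs, and find_id() is expected to map them to an ID such as
// vmIntrinsics::_dsqrt:
//
//   vmSymbols::SID k = vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_Math);
//   vmSymbols::SID n = vmSymbols::VM_SYMBOL_ENUM_NAME(sqrt_name);
//   vmSymbols::SID s = vmSymbols::VM_SYMBOL_ENUM_NAME(double_double_signature);
//   vmIntrinsics::ID id = vmIntrinsics::find_id(k, n, s, flags);  // e.g. _dsqrt
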
1556// These two methods are static since a GC may move the Method
1557bool Method::load_signature_classes(const methodHandle& m, TRAPS) {
1558  if (!THREAD->can_call_java()) {
1559    // There is nothing useful this routine can do from within the compiler thread.
1560    // Hopefully, the signature contains only well-known classes.
1561    // We could scan for this and return true/false, but the caller won't care.
1562    return false;
1563  }
1564  bool sig_is_loaded = true;
1565  Handle class_loader(THREAD, m->method_holder()->class_loader());
1566  Handle protection_domain(THREAD, m->method_holder()->protection_domain());
1567  ResourceMark rm(THREAD);
1568  Symbol*  signature = m->signature();
1569  for (SignatureStream ss(signature); !ss.is_done(); ss.next()) {
1570    if (ss.is_object()) {
1571      Symbol* sym = ss.as_symbol(CHECK_(false));
1572      Symbol*  name  = sym;
1573      Klass* klass = SystemDictionary::resolve_or_null(name, class_loader,
1574                                             protection_domain, THREAD);
1575      // We are loading classes eagerly. If a ClassNotFoundException or
1576      // a LinkageError was generated, be sure to ignore it.
1577      if (HAS_PENDING_EXCEPTION) {
1578        if (PENDING_EXCEPTION->is_a(SystemDictionary::ClassNotFoundException_klass()) ||
1579            PENDING_EXCEPTION->is_a(SystemDictionary::LinkageError_klass())) {
1580          CLEAR_PENDING_EXCEPTION;
1581        } else {
1582          return false;
1583        }
1584      }
1585      if (klass == NULL) { sig_is_loaded = false; }
1586    }
1587  }
1588  return sig_is_loaded;
1589}
1590
1591bool Method::has_unloaded_classes_in_signature(const methodHandle& m, TRAPS) {
1592  Handle class_loader(THREAD, m->method_holder()->class_loader());
1593  Handle protection_domain(THREAD, m->method_holder()->protection_domain());
1594  ResourceMark rm(THREAD);
1595  Symbol*  signature = m->signature();
1596  for (SignatureStream ss(signature); !ss.is_done(); ss.next()) {
1597    if (ss.type() == T_OBJECT) {
1598      Symbol* name = ss.as_symbol_or_null();
1599      if (name == NULL) return true;
1600      Klass* klass = SystemDictionary::find(name, class_loader, protection_domain, THREAD);
1601      if (klass == NULL) return true;
1602    }
1603  }
1604  return false;
1605}
1606
1607// Exposed so field engineers can debug the VM
1608void Method::print_short_name(outputStream* st) {
1609  ResourceMark rm;
1610#ifdef PRODUCT
1611  st->print(" %s::", method_holder()->external_name());
1612#else
1613  st->print(" %s::", method_holder()->internal_name());
1614#endif
1615  name()->print_symbol_on(st);
1616  if (WizardMode) signature()->print_symbol_on(st);
1617  else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))
1618    MethodHandles::print_as_basic_type_signature_on(st, signature(), true);
1619}
1620
1621// Comparator for sorting an object array containing
1622// Method*s.
1623static int method_comparator(Method* a, Method* b) {
1624  return a->name()->fast_compare(b->name());
1625}
1626
1627// This is only done during class loading, so it is OK to assume method_idnum matches the methods() array.
1628// default_methods also uses this (without the idnum ordering) for fast find_method.
1629void Method::sort_methods(Array<Method*>* methods, bool idempotent, bool set_idnums) {
1630  int length = methods->length();
1631  if (length > 1) {
1632    {
1633      NoSafepointVerifier nsv;
1634      QuickSort::sort<Method*>(methods->data(), length, method_comparator, idempotent);
1635    }
1636    // Reset method ordering
1637    if (set_idnums) {
1638      for (int i = 0; i < length; i++) {
1639        Method* m = methods->at(i);
1640        m->set_method_idnum(i);
1641        m->set_orig_method_idnum(i);
1642      }
1643    }
1644  }
1645}
1646
1647//-----------------------------------------------------------------------------------
1648// Non-product code unless JVM/TI needs it
1649
1650#if !defined(PRODUCT) || INCLUDE_JVMTI
1651class SignatureTypePrinter : public SignatureTypeNames {
1652 private:
1653  outputStream* _st;
1654  bool _use_separator;
1655
1656  void type_name(const char* name) {
1657    if (_use_separator) _st->print(", ");
1658    _st->print("%s", name);
1659    _use_separator = true;
1660  }
1661
1662 public:
1663  SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
1664    _st = st;
1665    _use_separator = false;
1666  }
1667
1668  void print_parameters()              { _use_separator = false; iterate_parameters(); }
1669  void print_returntype()              { _use_separator = false; iterate_returntype(); }
1670};
1671
1672
1673void Method::print_name(outputStream* st) {
1674  Thread *thread = Thread::current();
1675  ResourceMark rm(thread);
1676  st->print("%s ", is_static() ? "static" : "virtual");
1677  if (WizardMode) {
1678    st->print("%s.", method_holder()->internal_name());
1679    name()->print_symbol_on(st);
1680    signature()->print_symbol_on(st);
1681  } else {
1682    SignatureTypePrinter sig(signature(), st);
1683    sig.print_returntype();
1684    st->print(" %s.", method_holder()->internal_name());
1685    name()->print_symbol_on(st);
1686    st->print("(");
1687    sig.print_parameters();
1688    st->print(")");
1689  }
1690}
1691#endif // !PRODUCT || INCLUDE_JVMTI
1692
1693
1694void Method::print_codes_on(outputStream* st) const {
1695  print_codes_on(0, code_size(), st);
1696}
1697
1698void Method::print_codes_on(int from, int to, outputStream* st) const {
1699  Thread *thread = Thread::current();
1700  ResourceMark rm(thread);
1701  methodHandle mh (thread, (Method*)this);
1702  BytecodeStream s(mh);
1703  s.set_interval(from, to);
1704  BytecodeTracer::set_closure(BytecodeTracer::std_closure());
1705  while (s.next() >= 0) BytecodeTracer::trace(mh, s.bcp(), st);
1706}
1707
1708
1709// Simple compression of line number tables. We use a regular compressed stream, except that we compress deltas
1710// between (bci,line) pairs since they are smaller. If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned)
1711// we save it as one byte, otherwise we write a 0xFF escape character and use regular compression. 0x0 is used
1712// as end-of-stream terminator.
1713
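// Worked example (illustrative only): a pair with bci_delta = 4 and line_delta = 2
// fits the (5-bit, 3-bit) layout decoded by read_pair() below, so it is written as
// the single byte
//
//   (4 << 3) | 2  ==  0x22
//
// whereas a pair with bci_delta = 40 does not fit in 5 bits, so write_pair_regular()
// emits the 0xFF escape byte followed by both deltas in the regular compressed-int
// encoding.
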
1714void CompressedLineNumberWriteStream::write_pair_regular(int bci_delta, int line_delta) {
1715  // The bci delta and line delta do not compress into a single byte.
1716  // Write out escape character and use regular compression for bci and line number.
1717  write_byte((jubyte)0xFF);
1718  write_signed_int(bci_delta);
1719  write_signed_int(line_delta);
1720}
1721
1722// See comment in method.hpp which explains why this exists.
1723#if defined(_M_AMD64) && _MSC_VER >= 1400
1724#pragma optimize("", off)
1725void CompressedLineNumberWriteStream::write_pair(int bci, int line) {
1726  write_pair_inline(bci, line);
1727}
1728#pragma optimize("", on)
1729#endif
1730
1731CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) {
1732  _bci = 0;
1733  _line = 0;
1734}
1735
1736
1737bool CompressedLineNumberReadStream::read_pair() {
1738  jubyte next = read_byte();
1739  // Check for terminator
1740  if (next == 0) return false;
1741  if (next == 0xFF) {
1742    // Escape character, regular compression used
1743    _bci  += read_signed_int();
1744    _line += read_signed_int();
1745  } else {
1746    // Single byte compression used
1747    _bci  += next >> 3;
1748    _line += next & 0x7;
1749  }
1750  return true;
1751}
1752
1753#if INCLUDE_JVMTI
1754
1755Bytecodes::Code Method::orig_bytecode_at(int bci) const {
1756  BreakpointInfo* bp = method_holder()->breakpoints();
1757  for (; bp != NULL; bp = bp->next()) {
1758    if (bp->match(this, bci)) {
1759      return bp->orig_bytecode();
1760    }
1761  }
1762  {
1763    ResourceMark rm;
1764    fatal("no original bytecode found in %s at bci %d", name_and_sig_as_C_string(), bci);
1765  }
1766  return Bytecodes::_shouldnotreachhere;
1767}
1768
1769void Method::set_orig_bytecode_at(int bci, Bytecodes::Code code) {
1770  assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way");
1771  BreakpointInfo* bp = method_holder()->breakpoints();
1772  for (; bp != NULL; bp = bp->next()) {
1773    if (bp->match(this, bci)) {
1774      bp->set_orig_bytecode(code);
1775      // and continue, in case there is more than one
1776    }
1777  }
1778}
1779
1780void Method::set_breakpoint(int bci) {
1781  InstanceKlass* ik = method_holder();
1782  BreakpointInfo *bp = new BreakpointInfo(this, bci);
1783  bp->set_next(ik->breakpoints());
1784  ik->set_breakpoints(bp);
1785  // do this last:
1786  bp->set(this);
1787}
1788
1789static void clear_matches(Method* m, int bci) {
1790  InstanceKlass* ik = m->method_holder();
1791  BreakpointInfo* prev_bp = NULL;
1792  BreakpointInfo* next_bp;
1793  for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = next_bp) {
1794    next_bp = bp->next();
1795    // A bci value of -1 is used to delete all breakpoints in method m (e.g., clear_all_breakpoints).
1796    if (bci >= 0 ? bp->match(m, bci) : bp->match(m)) {
1797      // do this first:
1798      bp->clear(m);
1799      // unhook it
1800      if (prev_bp != NULL)
1801        prev_bp->set_next(next_bp);
1802      else
1803        ik->set_breakpoints(next_bp);
1804      delete bp;
1805      // When a class is redefined, JVMTI sets a breakpoint in all versions of EMCP
1806      // methods at the same location, so we can have multiple matching (method_index
1807      // and bci) BreakpointInfo nodes in the BreakpointInfo list. For a clear_breakpoint
1808      // request we should delete just one breakpoint and keep the BreakpointInfo of
1809      // all other method versions for future clear_breakpoint requests.
1810      // A bci value of -1 is used to clear all breakpoints (see clear_all_breakpoints),
1811      // which is called when the class is unloaded. In that case we delete the breakpoint
1812      // information for all versions of the method. We may not correctly restore the
1813      // original bytecode in all method versions, but that is OK because the class is
1814      // being unloaded, so these methods won't be used anymore.
1815      if (bci >= 0) {
1816        break;
1817      }
1818    } else {
1819      // This one is a keeper.
1820      prev_bp = bp;
1821    }
1822  }
1823}
1824
1825void Method::clear_breakpoint(int bci) {
1826  assert(bci >= 0, "");
1827  clear_matches(this, bci);
1828}
1829
1830void Method::clear_all_breakpoints() {
1831  clear_matches(this, -1);
1832}
1833
1834#endif // INCLUDE_JVMTI
1835
1836int Method::invocation_count() {
1837  MethodCounters *mcs = method_counters();
1838  if (TieredCompilation) {
1839    MethodData* const mdo = method_data();
1840    if (((mcs != NULL) ? mcs->invocation_counter()->carry() : false) ||
1841        ((mdo != NULL) ? mdo->invocation_counter()->carry() : false)) {
1842      return InvocationCounter::count_limit;
1843    } else {
1844      return ((mcs != NULL) ? mcs->invocation_counter()->count() : 0) +
1845             ((mdo != NULL) ? mdo->invocation_counter()->count() : 0);
1846    }
1847  } else {
1848    return (mcs == NULL) ? 0 : mcs->invocation_counter()->count();
1849  }
1850}
1851
1852int Method::backedge_count() {
1853  MethodCounters *mcs = method_counters();
1854  if (TieredCompilation) {
1855    MethodData* const mdo = method_data();
1856    if (((mcs != NULL) ? mcs->backedge_counter()->carry() : false) ||
1857        ((mdo != NULL) ? mdo->backedge_counter()->carry() : false)) {
1858      return InvocationCounter::count_limit;
1859    } else {
1860      return ((mcs != NULL) ? mcs->backedge_counter()->count() : 0) +
1861             ((mdo != NULL) ? mdo->backedge_counter()->count() : 0);
1862    }
1863  } else {
1864    return (mcs == NULL) ? 0 : mcs->backedge_counter()->count();
1865  }
1866}
1867
1868int Method::highest_comp_level() const {
1869  const MethodCounters* mcs = method_counters();
1870  if (mcs != NULL) {
1871    return mcs->highest_comp_level();
1872  } else {
1873    return CompLevel_none;
1874  }
1875}
1876
1877int Method::highest_osr_comp_level() const {
1878  const MethodCounters* mcs = method_counters();
1879  if (mcs != NULL) {
1880    return mcs->highest_osr_comp_level();
1881  } else {
1882    return CompLevel_none;
1883  }
1884}
1885
1886void Method::set_highest_comp_level(int level) {
1887  MethodCounters* mcs = method_counters();
1888  if (mcs != NULL) {
1889    mcs->set_highest_comp_level(level);
1890  }
1891}
1892
1893void Method::set_highest_osr_comp_level(int level) {
1894  MethodCounters* mcs = method_counters();
1895  if (mcs != NULL) {
1896    mcs->set_highest_osr_comp_level(level);
1897  }
1898}
1899
1900#if INCLUDE_JVMTI
1901
1902BreakpointInfo::BreakpointInfo(Method* m, int bci) {
1903  _bci = bci;
1904  _name_index = m->name_index();
1905  _signature_index = m->signature_index();
1906  _orig_bytecode = (Bytecodes::Code) *m->bcp_from(_bci);
1907  if (_orig_bytecode == Bytecodes::_breakpoint)
1908    _orig_bytecode = m->orig_bytecode_at(_bci);
1909  _next = NULL;
1910}
1911
1912void BreakpointInfo::set(Method* method) {
1913#ifdef ASSERT
1914  {
1915    Bytecodes::Code code = (Bytecodes::Code) *method->bcp_from(_bci);
1916    if (code == Bytecodes::_breakpoint)
1917      code = method->orig_bytecode_at(_bci);
1918    assert(orig_bytecode() == code, "original bytecode must be the same");
1919  }
1920#endif
1921  Thread *thread = Thread::current();
1922  *method->bcp_from(_bci) = Bytecodes::_breakpoint;
1923  method->incr_number_of_breakpoints(thread);
1924  SystemDictionary::notice_modification();
1925  {
1926    // Deoptimize all dependents on this method
1927    HandleMark hm(thread);
1928    methodHandle mh(thread, method);
1929    CodeCache::flush_dependents_on_method(mh);
1930  }
1931}
1932
1933void BreakpointInfo::clear(Method* method) {
1934  *method->bcp_from(_bci) = orig_bytecode();
1935  assert(method->number_of_breakpoints() > 0, "must not go negative");
1936  method->decr_number_of_breakpoints(Thread::current());
1937}
1938
1939#endif // INCLUDE_JVMTI
1940
1941// jmethodID handling
1942
1943// This is a block allocating object, sort of like JNIHandleBlock, only a
1944// lot simpler.
1945// It's allocated on the CHeap because once we allocate a jmethodID, we can
1946// never get rid of it.
1947
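// A rough usage sketch (see make_jmethod_id() and resolve_jmethod_id()): a jmethodID
// is simply a Method** slot handed out from one of these blocks, so the Method* it
// refers to can be swapped during class redefinition or cleared during class
// unloading without invalidating the jmethodID value itself:
//
//   jmethodID mid   = Method::make_jmethod_id(cld, m);     // mid points into a block
//   Method* current = Method::resolve_jmethod_id(mid);     // essentially *(Method**)mid
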
1948static const int min_block_size = 8;
1949
1950class JNIMethodBlockNode : public CHeapObj<mtClass> {
1951  friend class JNIMethodBlock;
1952  Method**        _methods;
1953  int             _number_of_methods;
1954  int             _top;
1955  JNIMethodBlockNode* _next;
1956
1957 public:
1958
1959  JNIMethodBlockNode(int num_methods = min_block_size);
1960
1961  ~JNIMethodBlockNode() { FREE_C_HEAP_ARRAY(Method*, _methods); }
1962
1963  void ensure_methods(int num_addl_methods) {
1964    if (_top < _number_of_methods) {
1965      num_addl_methods -= _number_of_methods - _top;
1966      if (num_addl_methods <= 0) {
1967        return;
1968      }
1969    }
1970    if (_next == NULL) {
1971      _next = new JNIMethodBlockNode(MAX2(num_addl_methods, min_block_size));
1972    } else {
1973      _next->ensure_methods(num_addl_methods);
1974    }
1975  }
1976};
1977
1978class JNIMethodBlock : public CHeapObj<mtClass> {
1979  JNIMethodBlockNode _head;
1980  JNIMethodBlockNode *_last_free;
1981 public:
1982  static Method* const _free_method;
1983
1984  JNIMethodBlock(int initial_capacity = min_block_size)
1985      : _head(initial_capacity), _last_free(&_head) {}
1986
1987  void ensure_methods(int num_addl_methods) {
1988    _last_free->ensure_methods(num_addl_methods);
1989  }
1990
1991  Method** add_method(Method* m) {
1992    for (JNIMethodBlockNode* b = _last_free; b != NULL; b = b->_next) {
1993      if (b->_top < b->_number_of_methods) {
1994        // top points to the next free entry.
1995        int i = b->_top;
1996        b->_methods[i] = m;
1997        b->_top++;
1998        _last_free = b;
1999        return &(b->_methods[i]);
2000      } else if (b->_top == b->_number_of_methods) {
2001        // If the next free entry ran off the end of the block, see if there's a freed entry.
2002        for (int i = 0; i < b->_number_of_methods; i++) {
2003          if (b->_methods[i] == _free_method) {
2004            b->_methods[i] = m;
2005            _last_free = b;
2006            return &(b->_methods[i]);
2007          }
2008        }
2009        // Only check each block once for frees.  They're very unlikely.
2010        // Increment top past the end of the block.
2011        b->_top++;
2012      }
2013      // Need to allocate a new block.
2014      if (b->_next == NULL) {
2015        b->_next = _last_free = new JNIMethodBlockNode();
2016      }
2017    }
2018    guarantee(false, "Should always allocate a free block");
2019    return NULL;
2020  }
2021
2022  bool contains(Method** m) {
2023    if (m == NULL) return false;
2024    for (JNIMethodBlockNode* b = &_head; b != NULL; b = b->_next) {
2025      if (b->_methods <= m && m < b->_methods + b->_number_of_methods) {
2026        // This is a bit of extra checking, for two reasons.  One is
2027        // that contains() deals with pointers that are passed in by
2028        // JNI code, so making sure that the pointer is aligned
2029        // correctly is valuable.  The other is that <= and > are
2030        // technically not defined on pointers, so the if guard can
2031        // pass spuriously; no modern compiler is likely to make that
2032        // a problem, though (and if one did, the guard could also
2033        // fail spuriously, which would be bad).
2034        ptrdiff_t idx = m - b->_methods;
2035        if (b->_methods + idx == m) {
2036          return true;
2037        }
2038      }
2039    }
2040    return false;  // not found
2041  }
2042
2043  // Doesn't really destroy it, just marks it as free so it can be reused.
2044  void destroy_method(Method** m) {
2045#ifdef ASSERT
2046    assert(contains(m), "should be a methodID");
2047#endif // ASSERT
2048    *m = _free_method;
2049  }
2050
2051  // During class unloading the methods are cleared, which is different
2052  // from being freed.
2053  void clear_all_methods() {
2054    for (JNIMethodBlockNode* b = &_head; b != NULL; b = b->_next) {
2055      for (int i = 0; i < b->_number_of_methods; i++) {
2056        b->_methods[i] = NULL;
2057      }
2058    }
2059  }
2060#ifndef PRODUCT
2061  int count_methods() {
2062    // count all allocated methods
2063    int count = 0;
2064    for (JNIMethodBlockNode* b = &_head; b != NULL; b = b->_next) {
2065      for (int i = 0; i < b->_number_of_methods; i++) {
2066        if (b->_methods[i] != _free_method) count++;
2067      }
2068    }
2069    return count;
2070  }
2071#endif // PRODUCT
2072};
2073
2074// Something that can't be mistaken for an address or a markOop
2075Method* const JNIMethodBlock::_free_method = (Method*)55;
2076
2077JNIMethodBlockNode::JNIMethodBlockNode(int num_methods) : _next(NULL), _top(0) {
2078  _number_of_methods = MAX2(num_methods, min_block_size);
2079  _methods = NEW_C_HEAP_ARRAY(Method*, _number_of_methods, mtInternal);
2080  for (int i = 0; i < _number_of_methods; i++) {
2081    _methods[i] = JNIMethodBlock::_free_method;
2082  }
2083}
2084
2085void Method::ensure_jmethod_ids(ClassLoaderData* loader_data, int capacity) {
2086  ClassLoaderData* cld = loader_data;
2087  if (!SafepointSynchronize::is_at_safepoint()) {
2088    // Have to add jmethod_ids() to class loader data thread-safely.
2089    // Also have to add the method to the list safely, which the cld lock
2090    // protects as well.
2091    MutexLockerEx ml(cld->metaspace_lock(),  Mutex::_no_safepoint_check_flag);
2092    if (cld->jmethod_ids() == NULL) {
2093      cld->set_jmethod_ids(new JNIMethodBlock(capacity));
2094    } else {
2095      cld->jmethod_ids()->ensure_methods(capacity);
2096    }
2097  } else {
2098    // At safepoint, we are single threaded and can set this.
2099    if (cld->jmethod_ids() == NULL) {
2100      cld->set_jmethod_ids(new JNIMethodBlock(capacity));
2101    } else {
2102      cld->jmethod_ids()->ensure_methods(capacity);
2103    }
2104  }
2105}
2106
2107// Add a method id to the jmethod_ids
2108jmethodID Method::make_jmethod_id(ClassLoaderData* loader_data, Method* m) {
2109  ClassLoaderData* cld = loader_data;
2110
2111  if (!SafepointSynchronize::is_at_safepoint()) {
2112    // Have to add jmethod_ids() to class loader data thread-safely.
2113    // Also have to add the method to the list safely, which the cld lock
2114    // protects as well.
2115    MutexLockerEx ml(cld->metaspace_lock(),  Mutex::_no_safepoint_check_flag);
2116    if (cld->jmethod_ids() == NULL) {
2117      cld->set_jmethod_ids(new JNIMethodBlock());
2118    }
2119    // jmethodID is a pointer to Method*
2120    return (jmethodID)cld->jmethod_ids()->add_method(m);
2121  } else {
2122    // At safepoint, we are single threaded and can set this.
2123    if (cld->jmethod_ids() == NULL) {
2124      cld->set_jmethod_ids(new JNIMethodBlock());
2125    }
2126    // jmethodID is a pointer to Method*
2127    return (jmethodID)cld->jmethod_ids()->add_method(m);
2128  }
2129}
2130
2131// Mark a jmethodID as free.  This is called when there is a data race in
2132// InstanceKlass while creating the jmethodID cache.
2133void Method::destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID m) {
2134  ClassLoaderData* cld = loader_data;
2135  Method** ptr = (Method**)m;
2136  assert(cld->jmethod_ids() != NULL, "should have method handles");
2137  cld->jmethod_ids()->destroy_method(ptr);
2138}
2139
2140void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) {
2141  // Can't assert the method_holder is the same because the new method has the
2142  // scratch method holder.
2143  assert(resolve_jmethod_id(jmid)->method_holder()->class_loader()
2144           == new_method->method_holder()->class_loader(),
2145         "changing to a different class loader");
2146  // Just change the method in place, jmethodID pointer doesn't change.
2147  *((Method**)jmid) = new_method;
2148}
2149
2150bool Method::is_method_id(jmethodID mid) {
2151  Method* m = resolve_jmethod_id(mid);
2152  assert(m != NULL, "should be called with non-null method");
2153  InstanceKlass* ik = m->method_holder();
2154  ClassLoaderData* cld = ik->class_loader_data();
2155  if (cld->jmethod_ids() == NULL) return false;
2156  return (cld->jmethod_ids()->contains((Method**)mid));
2157}
2158
2159Method* Method::checked_resolve_jmethod_id(jmethodID mid) {
2160  if (mid == NULL) return NULL;
2161  Method* o = resolve_jmethod_id(mid);
2162  if (o == NULL || o == JNIMethodBlock::_free_method || !((Metadata*)o)->is_method()) {
2163    return NULL;
2164  }
2165  return o;
2166}
2167
2168void Method::set_on_stack(const bool value) {
2169  // Set both the method itself and its constant pool.  The constant pool
2170  // being on the stack means some method referring to it is also on the stack.
2171  constants()->set_on_stack(value);
2172
2173  bool already_set = on_stack();
2174  _access_flags.set_on_stack(value);
2175  if (value && !already_set) {
2176    MetadataOnStackMark::record(this);
2177  }
2178}
2179
2180// Called when the class loader is unloaded to make all methods weak.
2181void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
2182  loader_data->jmethod_ids()->clear_all_methods();
2183}
2184
2185bool Method::has_method_vptr(const void* ptr) {
2186  Method m;
2187  // This assumes that the vtbl pointer is the first word of a C++ object.
2188  return dereference_vptr(&m) == dereference_vptr(ptr);
2189}
2190
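// A minimal sketch of what the vptr comparison above amounts to, assuming the usual
// layout where the vptr occupies the first word of a polymorphic C++ object
// (dereference_vptr essentially reads that word):
//
//   intptr_t expected = *(const intptr_t*)&m;     // vptr of the local Method above
//   intptr_t actual   = *(const intptr_t*)ptr;    // first word of the candidate
//   bool may_be_method = (expected == actual);
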
2191// Check that this pointer is valid by checking that the vtbl pointer matches
2192bool Method::is_valid_method() const {
2193  if (this == NULL) {
2194    return false;
2195  } else if ((intptr_t(this) & (wordSize-1)) != 0) {
2196    // Quick sanity check on pointer.
2197    return false;
2198  } else if (MetaspaceShared::is_in_shared_space(this)) {
2199    return MetaspaceShared::is_valid_shared_method(this);
2200  } else if (Metaspace::contains_non_shared(this)) {
2201    return has_method_vptr((const void*)this);
2202  } else {
2203    return false;
2204  }
2205}
2206
2207#ifndef PRODUCT
2208void Method::print_jmethod_ids(ClassLoaderData* loader_data, outputStream* out) {
2209  out->print_cr("jni_method_id count = %d", loader_data->jmethod_ids()->count_methods());
2210}
2211#endif // PRODUCT
2212
2213
2214// Printing
2215
2216#ifndef PRODUCT
2217
2218void Method::print_on(outputStream* st) const {
2219  ResourceMark rm;
2220  assert(is_method(), "must be method");
2221  st->print_cr("%s", internal_name());
2222  st->print_cr(" - this oop:          " INTPTR_FORMAT, p2i(this));
2223  st->print   (" - method holder:     "); method_holder()->print_value_on(st); st->cr();
2224  st->print   (" - constants:         " INTPTR_FORMAT " ", p2i(constants()));
2225  constants()->print_value_on(st); st->cr();
2226  st->print   (" - access:            0x%x  ", access_flags().as_int()); access_flags().print_on(st); st->cr();
2227  st->print   (" - name:              ");    name()->print_value_on(st); st->cr();
2228  st->print   (" - signature:         ");    signature()->print_value_on(st); st->cr();
2229  st->print_cr(" - max stack:         %d",   max_stack());
2230  st->print_cr(" - max locals:        %d",   max_locals());
2231  st->print_cr(" - size of params:    %d",   size_of_parameters());
2232  st->print_cr(" - method size:       %d",   method_size());
2233  if (intrinsic_id() != vmIntrinsics::_none)
2234    st->print_cr(" - intrinsic id:      %d %s", intrinsic_id(), vmIntrinsics::name_at(intrinsic_id()));
2235  if (highest_comp_level() != CompLevel_none)
2236    st->print_cr(" - highest level:     %d", highest_comp_level());
2237  st->print_cr(" - vtable index:      %d",   _vtable_index);
2238  st->print_cr(" - i2i entry:         " INTPTR_FORMAT, p2i(interpreter_entry()));
2239  st->print(   " - adapters:          ");
2240  AdapterHandlerEntry* a = ((Method*)this)->adapter();
2241  if (a == NULL)
2242    st->print_cr(INTPTR_FORMAT, p2i(a));
2243  else
2244    a->print_adapter_on(st);
2245  st->print_cr(" - compiled entry     " INTPTR_FORMAT, p2i(from_compiled_entry()));
2246  st->print_cr(" - code size:         %d",   code_size());
2247  if (code_size() != 0) {
2248    st->print_cr(" - code start:        " INTPTR_FORMAT, p2i(code_base()));
2249    st->print_cr(" - code end (excl):   " INTPTR_FORMAT, p2i(code_base() + code_size()));
2250  }
2251  if (method_data() != NULL) {
2252    st->print_cr(" - method data:       " INTPTR_FORMAT, p2i(method_data()));
2253  }
2254  st->print_cr(" - checked ex length: %d",   checked_exceptions_length());
2255  if (checked_exceptions_length() > 0) {
2256    CheckedExceptionElement* table = checked_exceptions_start();
2257    st->print_cr(" - checked ex start:  " INTPTR_FORMAT, p2i(table));
2258    if (Verbose) {
2259      for (int i = 0; i < checked_exceptions_length(); i++) {
2260        st->print_cr("   - throws %s", constants()->printable_name_at(table[i].class_cp_index));
2261      }
2262    }
2263  }
2264  if (has_linenumber_table()) {
2265    u_char* table = compressed_linenumber_table();
2266    st->print_cr(" - linenumber start:  " INTPTR_FORMAT, p2i(table));
2267    if (Verbose) {
2268      CompressedLineNumberReadStream stream(table);
2269      while (stream.read_pair()) {
2270        st->print_cr("   - line %d: %d", stream.line(), stream.bci());
2271      }
2272    }
2273  }
2274  st->print_cr(" - localvar length:   %d",   localvariable_table_length());
2275  if (localvariable_table_length() > 0) {
2276    LocalVariableTableElement* table = localvariable_table_start();
2277    st->print_cr(" - localvar start:    " INTPTR_FORMAT, p2i(table));
2278    if (Verbose) {
2279      for (int i = 0; i < localvariable_table_length(); i++) {
2280        int bci = table[i].start_bci;
2281        int len = table[i].length;
2282        const char* name = constants()->printable_name_at(table[i].name_cp_index);
2283        const char* desc = constants()->printable_name_at(table[i].descriptor_cp_index);
2284        int slot = table[i].slot;
2285        st->print_cr("   - %s %s bci=%d len=%d slot=%d", desc, name, bci, len, slot);
2286      }
2287    }
2288  }
2289  if (code() != NULL) {
2290    st->print   (" - compiled code: ");
2291    code()->print_value_on(st);
2292  }
2293  if (is_native()) {
2294    st->print_cr(" - native function:   " INTPTR_FORMAT, p2i(native_function()));
2295    st->print_cr(" - signature handler: " INTPTR_FORMAT, p2i(signature_handler()));
2296  }
2297}
2298
2299void Method::print_linkage_flags(outputStream* st) {
2300  access_flags().print_on(st);
2301  if (is_default_method()) {
2302    st->print("default ");
2303  }
2304  if (is_overpass()) {
2305    st->print("overpass ");
2306  }
2307}
2308#endif //PRODUCT
2309
2310void Method::print_value_on(outputStream* st) const {
2311  assert(is_method(), "must be method");
2312  st->print("%s", internal_name());
2313  print_address_on(st);
2314  st->print(" ");
2315  name()->print_value_on(st);
2316  st->print(" ");
2317  signature()->print_value_on(st);
2318  st->print(" in ");
2319  method_holder()->print_value_on(st);
2320  if (WizardMode) st->print("#%d", _vtable_index);
2321  if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
2322  if (WizardMode && code() != NULL) st->print(" ((nmethod*)%p)", code());
2323}
2324
2325#if INCLUDE_SERVICES
2326// Size Statistics
2327void Method::collect_statistics(KlassSizeStats *sz) const {
2328  int mysize = sz->count(this);
2329  sz->_method_bytes += mysize;
2330  sz->_method_all_bytes += mysize;
2331  sz->_rw_bytes += mysize;
2332
2333  if (constMethod()) {
2334    constMethod()->collect_statistics(sz);
2335  }
2336  if (method_data()) {
2337    method_data()->collect_statistics(sz);
2338  }
2339}
2340#endif // INCLUDE_SERVICES
2341
2342// LogTouchedMethods and PrintTouchedMethods
2343
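// A hedged usage sketch: with the LogTouchedMethods flag enabled (it may need to be
// unlocked as a diagnostic option first, e.g.
//
//   java -XX:+UnlockDiagnosticVMOptions -XX:+LogTouchedMethods ...
//
// depending on how the flag is declared in globals.hpp), log_touched() below records
// every method the runtime touches in a simple chained hash table keyed on the class
// name, method name and signature, and print_touched_methods() dumps that table.
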
2344// TouchedMethodRecord -- we can't use a HashtableEntry<Method*> because
2345// the Method may be garbage collected. Let's roll our own hash table.
2346class TouchedMethodRecord : CHeapObj<mtTracing> {
2347public:
2348  // It's OK to store Symbols here because they will NOT be GC'ed if
2349  // LogTouchedMethods is enabled.
2350  TouchedMethodRecord* _next;
2351  Symbol* _class_name;
2352  Symbol* _method_name;
2353  Symbol* _method_signature;
2354};
2355
2356static const int TOUCHED_METHOD_TABLE_SIZE = 20011;
2357static TouchedMethodRecord** _touched_method_table = NULL;
2358
2359void Method::log_touched(TRAPS) {
2360
2361  const int table_size = TOUCHED_METHOD_TABLE_SIZE;
2362  Symbol* my_class = klass_name();
2363  Symbol* my_name  = name();
2364  Symbol* my_sig   = signature();
2365
2366  unsigned int hash = my_class->identity_hash() +
2367                      my_name->identity_hash() +
2368                      my_sig->identity_hash();
2369  juint index = juint(hash) % table_size;
2370
2371  MutexLocker ml(TouchedMethodLog_lock, THREAD);
2372  if (_touched_method_table == NULL) {
2373    _touched_method_table = NEW_C_HEAP_ARRAY2(TouchedMethodRecord*, table_size,
2374                                              mtTracing, CURRENT_PC);
2375    memset(_touched_method_table, 0, sizeof(TouchedMethodRecord*)*table_size);
2376  }
2377
2378  TouchedMethodRecord* ptr = _touched_method_table[index];
2379  while (ptr) {
2380    if (ptr->_class_name       == my_class &&
2381        ptr->_method_name      == my_name &&
2382        ptr->_method_signature == my_sig) {
2383      return;
2384    }
2385    if (ptr->_next == NULL) break;
2386    ptr = ptr->_next;
2387  }
2388  TouchedMethodRecord* nptr = NEW_C_HEAP_OBJ(TouchedMethodRecord, mtTracing);
2389  my_class->set_permanent();  // prevent it from being reclaimed by GC
2390  my_name->set_permanent();
2391  my_sig->set_permanent();
2392  nptr->_class_name         = my_class;
2393  nptr->_method_name        = my_name;
2394  nptr->_method_signature   = my_sig;
2395  nptr->_next               = NULL;
2396
2397  if (ptr == NULL) {
2398    // first
2399    _touched_method_table[index] = nptr;
2400  } else {
2401    ptr->_next = nptr;
2402  }
2403}
2404
2405void Method::print_touched_methods(outputStream* out) {
2406  MutexLockerEx ml(Thread::current()->is_VM_thread() ? NULL : TouchedMethodLog_lock);
2407  out->print_cr("# Method::print_touched_methods version 1");
2408  if (_touched_method_table) {
2409    for (int i = 0; i < TOUCHED_METHOD_TABLE_SIZE; i++) {
2410      TouchedMethodRecord* ptr = _touched_method_table[i];
2411      while(ptr) {
2412        ptr->_class_name->print_symbol_on(out);       out->print(".");
2413        ptr->_method_name->print_symbol_on(out);      out->print(":");
2414        ptr->_method_signature->print_symbol_on(out); out->cr();
2415        ptr = ptr->_next;
2416      }
2417    }
2418  }
2419}
2420
2421// Verification
2422
2423void Method::verify_on(outputStream* st) {
2424  guarantee(is_method(), "object must be method");
2425  guarantee(constants()->is_constantPool(), "should be constant pool");
2426  guarantee(constMethod()->is_constMethod(), "should be ConstMethod*");
2427  MethodData* md = method_data();
2428  guarantee(md == NULL ||
2429      md->is_methodData(), "should be method data");
2430}
2431