compiledIC.cpp revision 4565:a6e09d6dd8e5
/*
 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
2438889Sjdp
2538889Sjdp#include "precompiled.hpp"
2638889Sjdp#include "classfile/systemDictionary.hpp"
2738889Sjdp#include "code/codeCache.hpp"
2838889Sjdp#include "code/compiledIC.hpp"
2933965Sjdp#include "code/icBuffer.hpp"
3038889Sjdp#include "code/nmethod.hpp"
3133965Sjdp#include "code/vtableStubs.hpp"
3233965Sjdp#include "interpreter/interpreter.hpp"
3338889Sjdp#include "interpreter/linkResolver.hpp"
3433965Sjdp#include "memory/metadataFactory.hpp"
3538889Sjdp#include "memory/oopFactory.hpp"
3638889Sjdp#include "oops/method.hpp"
3738889Sjdp#include "oops/oop.inline.hpp"
3833965Sjdp#include "oops/symbol.hpp"
3938889Sjdp#include "runtime/icache.hpp"
4038889Sjdp#include "runtime/sharedRuntime.hpp"
4138889Sjdp#include "runtime/stubRoutines.hpp"
4238889Sjdp#include "utilities/events.hpp"
4338889Sjdp
4438889Sjdp
// Every time a compiled IC is changed or its type is being accessed,
// either the CompiledIC_lock must be set or we must be at a safe point.

4833965Sjdp//-----------------------------------------------------------------------------
4938889Sjdp// Low-level access to an inline cache. Private, since they might not be
5038889Sjdp// MT-safe to use.
5133965Sjdp
5238889Sjdpvoid* CompiledIC::cached_value() const {
5338889Sjdp  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
5438889Sjdp  assert (!is_optimized(), "an optimized virtual call does not have a cached metadata");
5538889Sjdp
5638889Sjdp  if (!is_in_transition_state()) {
5738889Sjdp    void* data = (void*)_value->data();
5838889Sjdp    // If we let the metadata value here be initialized to zero...
5938889Sjdp    assert(data != NULL || Universe::non_oop_word() == NULL,
6038889Sjdp           "no raw nulls in CompiledIC metadatas, because of patching races");
6138889Sjdp    return (data == (void*)Universe::non_oop_word()) ? NULL : data;
6238889Sjdp  } else {
6338889Sjdp    return InlineCacheBuffer::cached_value_for((CompiledIC *)this);
6438889Sjdp  }
6538889Sjdp}
6638889Sjdp
6738889Sjdp
// Low-level update of the call site: patches the call destination and, for
// non-optimized ICs, the cached value.  Caller must hold CompiledIC_lock or
// be at a safepoint.
//
// entry_point - new destination of the inline-cache call
// is_icstub   - true when entry_point is an ICStub (transition stub)
// cache       - new cached value (Metadata* or CompiledICHolder*), or NULL
// is_icholder - true when 'cache' is a CompiledICHolder*
void CompiledIC::internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder) {
  assert(entry_point != NULL, "must set legal entry point");
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  assert (!is_optimized() || cache == NULL, "an optimized virtual call does not have a cached metadata");
  assert (cache == NULL || cache != (Metadata*)badOopVal, "invalid metadata");

  assert(!is_icholder || is_icholder_entry(entry_point), "must be");

  // Don't use ic_destination for this test since that forwards
  // through ICBuffer instead of returning the actual current state of
  // the CompiledIC.
  if (is_icholder_entry(_ic_call->destination())) {
    // When patching for the ICStub case the cached value isn't
    // overwritten until the ICStub copied into the CompiledIC during
    // the next safepoint.  Make sure that the CompiledICHolder* is
    // marked for release at this point since it won't be identifiable
    // once the entry point is overwritten.
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)_value->data());
  }

  if (TraceCompiledIC) {
    tty->print("  ");
    print_compiled_ic();
    tty->print(" changing destination to " INTPTR_FORMAT, entry_point);
    if (!is_optimized()) {
      tty->print(" changing cached %s to " INTPTR_FORMAT, is_icholder ? "icholder" : "metadata", (address)cache);
    }
    if (is_icstub) {
      tty->print(" (icstub)");
    }
    tty->cr();
  }

  {
    // Patch the branch target under the Patching_lock so concurrent
    // patchers cannot interleave.
    MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
#ifdef ASSERT
    CodeBlob* cb = CodeCache::find_blob_unsafe(_ic_call);
    assert(cb != NULL && cb->is_nmethod(), "must be nmethod");
#endif
    _ic_call->set_destination_mt_safe(entry_point);
  }

  if (is_optimized() || is_icstub) {
    // Optimized call sites don't have a cache value and ICStub call
    // sites only change the entry point.  Changing the value in that
    // case could lead to MT safety issues.
    assert(cache == NULL, "must be null");
    return;
  }

  // NULL is represented by the non-oop sentinel (see cached_value()).
  if (cache == NULL)  cache = (void*)Universe::non_oop_word();

  _value->set_data((intptr_t)cache);
}
12233965Sjdp
12333965Sjdp
12433965Sjdpvoid CompiledIC::set_ic_destination(ICStub* stub) {
12533965Sjdp  internal_set_ic_destination(stub->code_begin(), true, NULL, false);
12633965Sjdp}
12733965Sjdp
12833965Sjdp
12933965Sjdp
13033965Sjdpaddress CompiledIC::ic_destination() const {
13133965Sjdp assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
13233965Sjdp if (!is_in_transition_state()) {
13333965Sjdp   return _ic_call->destination();
13433965Sjdp } else {
13538889Sjdp   return InlineCacheBuffer::ic_destination_for((CompiledIC *)this);
13633965Sjdp }
13738889Sjdp}
13833965Sjdp
13938889Sjdp
14033965Sjdpbool CompiledIC::is_in_transition_state() const {
14138889Sjdp  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
14238889Sjdp  return InlineCacheBuffer::contains(_ic_call->destination());
14338889Sjdp}
14438889Sjdp
14538889Sjdp
14638889Sjdpbool CompiledIC::is_icholder_call() const {
14738889Sjdp  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
14838889Sjdp  return !_is_optimized && is_icholder_entry(ic_destination());
14938889Sjdp}
15038889Sjdp
15138889Sjdp// Returns native address of 'call' instruction in inline-cache. Used by
15238889Sjdp// the InlineCacheBuffer when it needs to find the stub.
15338889Sjdpaddress CompiledIC::stub_address() const {
15438889Sjdp  assert(is_in_transition_state(), "should only be called when we are in a transition state");
15538889Sjdp  return _ic_call->destination();
15638889Sjdp}
15738889Sjdp
15838889Sjdp
//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.
16138889Sjdp
16238889Sjdp
// Transitions this inline cache to the megamorphic state: the call is routed
// through a vtable stub (virtual dispatch) or an itable stub (interface
// dispatch) instead of a single monomorphic target.  The switch is performed
// via a transition stub in the InlineCacheBuffer; the cached value becomes
// the interface Klass* (itable case) or the selected Method* (vtable case).
void CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
  methodHandle method = call_info->selected_method();
  bool is_invoke_interface = (bytecode == Bytecodes::_invokeinterface && !call_info->has_vtable_index());
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
  assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");

  address entry;
  if (is_invoke_interface) {
    // Interface dispatch: itable stub, keyed by the itable index of the
    // resolved method; the cached value is the declaring interface.
    int index = klassItable::compute_itable_index(call_info->resolved_method()());
    entry = VtableStubs::create_stub(false, index, method());
    assert(entry != NULL, "entry not computed");
    InstanceKlass* k = call_info->resolved_method()->method_holder();
    assert(k->is_interface(), "sanity check");
    InlineCacheBuffer::create_transition_stub(this, k, entry);
  } else {
    // Virtual dispatch: vtable stub; the cached value is the Method*.
    // Can be different than method->vtable_index(), due to package-private etc.
    int vtable_index = call_info->vtable_index();
    entry = VtableStubs::create_stub(true, vtable_index, method());
    InlineCacheBuffer::create_transition_stub(this, method(), entry);
  }

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
                   instruction_address(), method->print_value_string(), entry);
  }

  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_megamorphic(), "sanity check");
}
19938889Sjdp
20038889Sjdp
20138889Sjdp// true if destination is megamorphic stub
20238889Sjdpbool CompiledIC::is_megamorphic() const {
20338889Sjdp  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
20438889Sjdp  assert(!is_optimized(), "an optimized call cannot be megamorphic");
20538889Sjdp
20638889Sjdp  // Cannot rely on cached_value. It is either an interface or a method.
20738889Sjdp  return VtableStubs::is_entry_point(ic_destination());
20838889Sjdp}
20938889Sjdp
// True when the inline cache currently targets an nmethod (compiled code),
// i.e. the call is monomorphic to compiled code.
bool CompiledIC::is_call_to_compiled() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");

  // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
  // method is guaranteed to still exist, since we only remove methods after all inline caches
  // has been cleaned up
  CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
  bool is_monomorphic = (cb != NULL && cb->is_nmethod());
  // Check that the cached_value is a klass for non-optimized monomorphic calls
  // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used
  // for calling directly to vep without using the inline cache (i.e., cached_value == NULL)
#ifdef ASSERT
  CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
  bool is_c1_method = caller->is_compiled_by_c1();
  assert( is_c1_method ||
         !is_monomorphic ||
         is_optimized() ||
         (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
#endif // ASSERT
  return is_monomorphic;
}
23138889Sjdp
23238889Sjdp
// True when the inline cache currently routes into the interpreter: for a
// vanilla IC, the destination is an adapter blob (icholder path); for an
// optimized IC, the destination is a stub inside the caller's own code blob.
bool CompiledIC::is_call_to_interpreted() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  // Call to interpreter if destination is either calling to a stub (if it
  // is optimized), or calling to an I2C blob
  bool is_call_to_interpreted = false;
  if (!is_optimized()) {
    // must use unsafe because the destination can be a zombie (and we're cleaning)
    // and the print_compiled_ic code wants to know if site (in the non-zombie)
    // is to the interpreter.
    CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
    is_call_to_interpreted = (cb != NULL && cb->is_adapter_blob());
    assert(!is_call_to_interpreted || (is_icholder_call() && cached_icholder() != NULL), "sanity check");
  } else {
    // Check if we are calling into our own codeblob (i.e., to a stub)
    CodeBlob* cb = CodeCache::find_blob(_ic_call->instruction_address());
    address dest = ic_destination();
#ifdef ASSERT
    {
      CodeBlob* db = CodeCache::find_blob_unsafe(dest);
      assert(!db->is_adapter_blob(), "must use stub!");
    }
#endif /* ASSERT */
    is_call_to_interpreted = cb->contains(dest);
  }
  return is_call_to_interpreted;
}
25938889Sjdp
26038889Sjdp
26138889Sjdpvoid CompiledIC::set_to_clean() {
26238889Sjdp  assert(SafepointSynchronize::is_at_safepoint() || CompiledIC_lock->is_locked() , "MT-unsafe call");
26338889Sjdp  if (TraceInlineCacheClearing || TraceICs) {
26438889Sjdp    tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", instruction_address());
26538889Sjdp    print();
26638889Sjdp  }
26738889Sjdp
26838889Sjdp  address entry;
26938889Sjdp  if (is_optimized()) {
27038889Sjdp    entry = SharedRuntime::get_resolve_opt_virtual_call_stub();
27138889Sjdp  } else {
27238889Sjdp    entry = SharedRuntime::get_resolve_virtual_call_stub();
27338889Sjdp  }
27438889Sjdp
27538889Sjdp  // A zombie transition will always be safe, since the metadata has already been set to NULL, so
27638889Sjdp  // we only need to patch the destination
27738889Sjdp  bool safe_transition = is_optimized() || SafepointSynchronize::is_at_safepoint();
27838889Sjdp
27938889Sjdp  if (safe_transition) {
28038889Sjdp    // Kill any leftover stub we might have too
28138889Sjdp    if (is_in_transition_state()) {
28238889Sjdp      ICStub* old_stub = ICStub_from_destination_address(stub_address());
28338889Sjdp      old_stub->clear();
28438889Sjdp    }
28538889Sjdp    if (is_optimized()) {
28638889Sjdp    set_ic_destination(entry);
28738889Sjdp  } else {
28838889Sjdp      set_ic_destination_and_value(entry, (void*)NULL);
28938889Sjdp    }
29038889Sjdp  } else {
29138889Sjdp    // Unsafe transition - create stub.
29238889Sjdp    InlineCacheBuffer::create_transition_stub(this, NULL, entry);
29338889Sjdp  }
29438889Sjdp  // We can't check this anymore. With lazy deopt we could have already
29538889Sjdp  // cleaned this IC entry before we even return. This is possible if
29638889Sjdp  // we ran out of space in the inline cache buffer trying to do the
29738889Sjdp  // set_next and we safepointed to free up space. This is a benign
29838889Sjdp  // race because the IC entry was complete when we safepointed so
29938889Sjdp  // cleaning it immediately is harmless.
30038889Sjdp  // assert(is_clean(), "sanity check");
30138889Sjdp}
30238889Sjdp
30338889Sjdp
30438889Sjdpbool CompiledIC::is_clean() const {
30538889Sjdp  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
30638889Sjdp  bool is_clean = false;
30738889Sjdp  address dest = ic_destination();
30838889Sjdp  is_clean = dest == SharedRuntime::get_resolve_opt_virtual_call_stub() ||
30938889Sjdp             dest == SharedRuntime::get_resolve_virtual_call_stub();
31038889Sjdp  assert(!is_clean || is_optimized() || cached_value() == NULL, "sanity check");
31138889Sjdp  return is_clean;
31238889Sjdp}
31338889Sjdp
31438889Sjdp
// Transitions this inline cache to the monomorphic state described by
// 'info': either a direct call to compiled code, or a call into the
// interpreter (via the static-call stub for optimized ICs, or via a
// CompiledICHolder transition stub otherwise).  Unsafe transitions are
// funneled through the InlineCacheBuffer.
void CompiledIC::set_to_monomorphic(CompiledICInfo& info) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  // Updating a cache to the wrong entry can cause bugs that are very hard
  // to track down - if cache entry gets invalid - we just clean it. In
  // this way it is always the same code path that is responsible for
  // updating and resolving an inline cache
  //
  // The above is no longer true. SharedRuntime::fixup_callers_callsite will change optimized
  // callsites. In addition ic_miss code will update a site to monomorphic if it determines
  // that an monomorphic call to the interpreter can now be monomorphic to compiled code.
  //
  // In both of these cases the only thing being modifed is the jump/call target and these
  // transitions are mt_safe

  Thread *thread = Thread::current();
  if (info.to_interpreter()) {
    // Call to interpreter
    if (info.is_optimized() && is_optimized()) {
       assert(is_clean(), "unsafe IC path");
       MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
      // the call analysis (callee structure) specifies that the call is optimized
      // (either because of CHA or the static target is final)
      // At code generation time, this call has been emitted as static call
      // Call via stub
      assert(info.cached_metadata() != NULL && info.cached_metadata()->is_method(), "sanity check");
      CompiledStaticCall* csc = compiledStaticCall_at(instruction_address());
      methodHandle method (thread, (Method*)info.cached_metadata());
      csc->set_to_interpreted(method, info.entry());
      if (TraceICs) {
         ResourceMark rm(thread);
         tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to interpreter: %s",
           instruction_address(),
           method->print_value_string());
      }
    } else {
      // Call via method-klass-holder
      InlineCacheBuffer::create_transition_stub(this, info.claim_cached_icholder(), info.entry());
      if (TraceICs) {
         ResourceMark rm(thread);
         tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to interpreter via icholder ", instruction_address());
      }
    }
  } else {
    // Call to compiled code
    bool static_bound = info.is_optimized() || (info.cached_metadata() == NULL);
#ifdef ASSERT
    CodeBlob* cb = CodeCache::find_blob_unsafe(info.entry());
    assert (cb->is_nmethod(), "must be compiled!");
#endif /* ASSERT */

    // This is MT safe if we come from a clean-cache and go through a
    // non-verified entry point
    bool safe = SafepointSynchronize::is_at_safepoint() ||
                (!is_in_transition_state() && (info.is_optimized() || static_bound || is_clean()));

    if (!safe) {
      // Not MT-safe to patch directly; go through a transition stub.
      InlineCacheBuffer::create_transition_stub(this, info.cached_metadata(), info.entry());
    } else {
      if (is_optimized()) {
        set_ic_destination(info.entry());
      } else {
        set_ic_destination_and_value(info.entry(), info.cached_metadata());
      }
    }

    if (TraceICs) {
      ResourceMark rm(thread);
      assert(info.cached_metadata() == NULL || info.cached_metadata()->is_klass(), "must be");
      tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to compiled (rcvr klass) %s: %s",
        instruction_address(),
        ((Klass*)info.cached_metadata())->print_value_string(),
        (safe) ? "" : "via stub");
    }
  }
  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
39738889Sjdp
39833965Sjdp
// is_optimized: Compiler has generated an optimized call (i.e., no inline
// cache) static_bound: The call can be static bound (i.e, no need to use
// inline cache)
//
// Computes (into 'info') the entry point and cached value to use when making
// 'method' the monomorphic target of an inline cache.  This only fills in
// the CompiledICInfo; the call site itself is patched separately (see
// set_to_monomorphic).
void CompiledIC::compute_monomorphic_entry(methodHandle method,
                                           KlassHandle receiver_klass,
                                           bool is_optimized,
                                           bool static_bound,
                                           CompiledICInfo& info,
                                           TRAPS) {
  nmethod* method_code = method->code();
  address entry = NULL;
  if (method_code != NULL) {
    // Call to compiled code
    // Optimized/static-bound calls skip the receiver check and use the
    // verified entry point; otherwise the IC performs the check.
    if (static_bound || is_optimized) {
      entry      = method_code->verified_entry_point();
    } else {
      entry      = method_code->entry_point();
    }
  }
  if (entry != NULL) {
    // Call to compiled code
    info.set_compiled_entry(entry, (static_bound || is_optimized) ? NULL : receiver_klass(), is_optimized);
  } else {
    // Note: the following problem exists with Compiler1:
    //   - at compile time we may or may not know if the destination is final
    //   - if we know that the destination is final, we will emit an optimized
    //     virtual call (no inline cache), and need a Method* to make a call
    //     to the interpreter
    //   - if we do not know if the destination is final, we emit a standard
    //     virtual call, and use CompiledICHolder to call interpreted code
    //     (no static call stub has been generated)
    //     However in that case we will now notice it is static_bound
    //     and convert the call into what looks to be an optimized
    //     virtual call. This causes problems in verifying the IC because
    //     it look vanilla but is optimized. Code in is_call_to_interpreted
    //     is aware of this and weakens its asserts.

    // static_bound should imply is_optimized -- otherwise we have a
    // performance bug (statically-bindable method is called via
    // dynamically-dispatched call note: the reverse implication isn't
    // necessarily true -- the call may have been optimized based on compiler
    // analysis (static_bound is only based on "final" etc.)
#ifdef COMPILER2
#ifdef TIERED
#if defined(ASSERT)
    // can't check the assert because we don't have the CompiledIC with which to
    // find the address if the call instruction.
    //
    // CodeBlob* cb = find_blob_unsafe(instruction_address());
    // assert(cb->is_compiled_by_c1() || !static_bound || is_optimized, "static_bound should imply is_optimized");
#endif // ASSERT
#else
    assert(!static_bound || is_optimized, "static_bound should imply is_optimized");
#endif // TIERED
#endif // COMPILER2
    if (is_optimized) {
      // Use stub entry
      info.set_interpreter_entry(method()->get_c2i_entry(), method());
    } else {
      // Use icholder entry
      CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass());
      info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
    }
  }
  assert(info.is_optimized() == is_optimized, "must agree");
}
46538889Sjdp
46638889Sjdp
46733965Sjdpbool CompiledIC::is_icholder_entry(address entry) {
46838889Sjdp  CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
46938889Sjdp  return (cb != NULL && cb->is_adapter_blob());
47033965Sjdp}
47138889Sjdp
// ----------------------------------------------------------------------------
47333965Sjdp
47438889Sjdpvoid CompiledStaticCall::set_to_clean() {
47538889Sjdp  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
47633965Sjdp  // Reset call site
47733965Sjdp  MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
47833965Sjdp#ifdef ASSERT
47933965Sjdp  CodeBlob* cb = CodeCache::find_blob_unsafe(this);
48033965Sjdp  assert(cb != NULL && cb->is_nmethod(), "must be nmethod");
48133965Sjdp#endif
48233965Sjdp  set_destination_mt_safe(SharedRuntime::get_resolve_static_call_stub());
48333965Sjdp
48433965Sjdp  // Do not reset stub here:  It is too expensive to call find_stub.
48533965Sjdp  // Instead, rely on caller (nmethod::clear_inline_caches) to clear
48638889Sjdp  // both the call and its stub.
48738889Sjdp}
48838889Sjdp
48938889Sjdp
49038889Sjdpbool CompiledStaticCall::is_clean() const {
49138889Sjdp  return destination() == SharedRuntime::get_resolve_static_call_stub();
49233965Sjdp}
49333965Sjdp
49433965Sjdpbool CompiledStaticCall::is_call_to_compiled() const {
49533965Sjdp  return CodeCache::contains(destination());
49633965Sjdp}
49733965Sjdp
49838889Sjdp
49933965Sjdpbool CompiledStaticCall::is_call_to_interpreted() const {
50033965Sjdp  // It is a call to interpreted, if it calls to a stub. Hence, the destination
50133965Sjdp  // must be in the stub part of the nmethod that contains the call
50233965Sjdp  nmethod* nm = CodeCache::find_nmethod(instruction_address());
50333965Sjdp  return nm->stub_contains(destination());
50433965Sjdp}
50533965Sjdp
50633965Sjdpvoid CompiledStaticCall::set(const StaticCallInfo& info) {
50733965Sjdp  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
50833965Sjdp  MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
50933965Sjdp  // Updating a cache to the wrong entry can cause bugs that are very hard
51033965Sjdp  // to track down - if cache entry gets invalid - we just clean it. In
51133965Sjdp  // this way it is always the same code path that is responsible for
51233965Sjdp  // updating and resolving an inline cache
51333965Sjdp  assert(is_clean(), "do not update a call entry - use clean");
51433965Sjdp
51533965Sjdp  if (info._to_interpreter) {
51638889Sjdp    // Call to interpreted code
51738889Sjdp    set_to_interpreted(info.callee(), info.entry());
51838889Sjdp  } else {
51938889Sjdp    if (TraceICs) {
52038889Sjdp      ResourceMark rm;
52138889Sjdp      tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_compiled " INTPTR_FORMAT,
52238889Sjdp                    instruction_address(),
52338889Sjdp                    info.entry());
52438889Sjdp    }
52538889Sjdp    // Call to compiled code
52638889Sjdp    assert (CodeCache::contains(info.entry()), "wrong entry point");
52738889Sjdp    set_destination_mt_safe(info.entry());
52833965Sjdp  }
52933965Sjdp}
53033965Sjdp
53133965Sjdp
53233965Sjdp// Compute settings for a CompiledStaticCall. Since we might have to set
53338889Sjdp// the stub when calling to the interpreter, we need to return arguments.
53438889Sjdpvoid CompiledStaticCall::compute_entry(methodHandle m, StaticCallInfo& info) {
53538889Sjdp  nmethod* m_code = m->code();
536  info._callee = m;
537  if (m_code != NULL) {
538    info._to_interpreter = false;
539    info._entry  = m_code->verified_entry_point();
540  } else {
541    // Callee is interpreted code.  In any case entering the interpreter
542    // puts a converter-frame on the stack to save arguments.
543    info._to_interpreter = true;
544    info._entry      = m()->get_c2i_entry();
545  }
546}
547
548address CompiledStaticCall::find_stub() {
549  // Find reloc. information containing this call-site
550  RelocIterator iter((nmethod*)NULL, instruction_address());
551  while (iter.next()) {
552    if (iter.addr() == instruction_address()) {
553      switch(iter.type()) {
554        case relocInfo::static_call_type:
555          return iter.static_call_reloc()->static_stub();
556        // We check here for opt_virtual_call_type, since we reuse the code
557        // from the CompiledIC implementation
558        case relocInfo::opt_virtual_call_type:
559          return iter.opt_virtual_call_reloc()->static_stub();
560        case relocInfo::poll_type:
561        case relocInfo::poll_return_type: // A safepoint can't overlap a call.
562        default:
563          ShouldNotReachHere();
564      }
565    }
566  }
567  return NULL;
568}
569
570
//-----------------------------------------------------------------------------
// Non-product mode code
573#ifndef PRODUCT
574
575void CompiledIC::verify() {
576  // make sure code pattern is actually a call imm32 instruction
577  _ic_call->verify();
578  if (os::is_MP()) {
579    _ic_call->verify_alignment();
580  }
581  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted()
582          || is_optimized() || is_megamorphic(), "sanity check");
583}
584
// Prints this inline cache (see print_compiled_ic) followed by a newline.
void CompiledIC::print() {
  print_compiled_ic();
  tty->cr();
}
589
590void CompiledIC::print_compiled_ic() {
591  tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
592             instruction_address(), is_call_to_interpreted() ? "interpreted " : "", ic_destination(), is_optimized() ? NULL : cached_value());
593}
594
// Prints the call site address and its current state (clean / compiled /
// interpreted); no state label is printed if none of the predicates match.
void CompiledStaticCall::print() {
  tty->print("static call at " INTPTR_FORMAT " -> ", instruction_address());
  if (is_clean()) {
    tty->print("clean");
  } else if (is_call_to_compiled()) {
    tty->print("compiled");
  } else if (is_call_to_interpreted()) {
    tty->print("interpreted");
  }
  tty->cr();
}
606
607#endif // !PRODUCT
608