/*
 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "code/vtableStubs.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/linkResolver.hpp"
#include "memory/oopFactory.hpp"
#include "oops/methodOop.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "runtime/icache.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/events.hpp"


// Every time a compiled IC is changed or its type is accessed, either the
// CompiledIC_lock must be held or we must be at a safepoint.

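// A minimal sketch of that discipline (illustrative only, not part of this
// file; `ic` stands for a CompiledIC* obtained by some caller):
//
//   {
//     MutexLocker ml(CompiledIC_lock);  // or: run the code at a safepoint
//     if (!ic->is_clean()) {
//       ic->set_to_clean();             // reads and transitions are now safe
//     }
//   }
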
//-----------------------------------------------------------------------------
// Low-level access to an inline cache. Private, since they might not be
// MT-safe to use.

void CompiledIC::set_cached_oop(oop cache) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  assert (!is_optimized(), "an optimized virtual call does not have a cached oop");
  assert (cache == NULL || cache != badOop, "invalid oop");

  if (TraceCompiledIC) {
    tty->print("  ");
    print_compiled_ic();
    tty->print_cr(" changing oop to " INTPTR_FORMAT, (address)cache);
  }

  if (cache == NULL)  cache = (oop)Universe::non_oop_word();

  *_oop_addr = cache;
  // fix up the relocations
  RelocIterator iter = _oops;
  while (iter.next()) {
    if (iter.type() == relocInfo::oop_type) {
      oop_Relocation* r = iter.oop_reloc();
      if (r->oop_addr() == _oop_addr)
        r->fix_oop_relocation();
    }
  }
  return;
}


oop CompiledIC::cached_oop() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  assert (!is_optimized(), "an optimized virtual call does not have a cached oop");

  if (!is_in_transition_state()) {
    oop data = *_oop_addr;
    // If we let the oop value here be initialized to zero...
    assert(data != NULL || Universe::non_oop_word() == NULL,
           "no raw nulls in CompiledIC oops, because of patching races");
    return (data == (oop)Universe::non_oop_word()) ? (oop)NULL : data;
  } else {
    return InlineCacheBuffer::cached_oop_for((CompiledIC *)this);
  }
}
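
// Sketch of the sentinel convention used above (a reading of the code, not a
// spec): a cleared slot never holds a raw NULL oop; set_cached_oop(NULL)
// stores Universe::non_oop_word() instead, and cached_oop() translates it
// back, so callers only ever see NULL or a real oop:
//
//   set_cached_oop(NULL);        // slot now holds non_oop_word()
//   oop o = cached_oop();        // reads back as NULL
//
// This keeps racing readers from ever observing a raw NULL in the slot.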


void CompiledIC::set_ic_destination(address entry_point) {
  assert(entry_point != NULL, "must set legal entry point");
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  if (TraceCompiledIC) {
    tty->print("  ");
    print_compiled_ic();
    tty->print_cr(" changing destination to " INTPTR_FORMAT, entry_point);
  }
  MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
#ifdef ASSERT
  CodeBlob* cb = CodeCache::find_blob_unsafe(_ic_call);
  assert(cb != NULL && cb->is_nmethod(), "must be nmethod");
#endif
  _ic_call->set_destination_mt_safe(entry_point);
}


address CompiledIC::ic_destination() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  if (!is_in_transition_state()) {
    return _ic_call->destination();
  } else {
    return InlineCacheBuffer::ic_destination_for((CompiledIC *)this);
  }
}


bool CompiledIC::is_in_transition_state() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  return InlineCacheBuffer::contains(_ic_call->destination());
}


// Returns the native address of the 'call' instruction in this inline cache.
// Used by the InlineCacheBuffer when it needs to find the stub.
address CompiledIC::stub_address() const {
  assert(is_in_transition_state(), "should only be called when we are in a transition state");
  return _ic_call->destination();
}
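
// Illustrative picture of a transition (derived from the code above): while
// an update is pending, the call instruction targets an ICStub inside the
// InlineCacheBuffer instead of its final destination, so that
//
//   is_in_transition_state() == InlineCacheBuffer::contains(destination)
//   stub_address()           == address of the pending ICStub
//
// and cached_oop()/ic_destination() read the in-flight values out of that stub.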


//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.


void CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
  methodHandle method = call_info->selected_method();
  bool is_invoke_interface = (bytecode == Bytecodes::_invokeinterface && !call_info->has_vtable_index());
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  assert(method->is_oop(), "cannot be NULL and must be oop");
  assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
  assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");

  address entry;
  if (is_invoke_interface) {
    int index = klassItable::compute_itable_index(call_info->resolved_method()());
    entry = VtableStubs::create_stub(false, index, method());
    assert(entry != NULL, "entry not computed");
    klassOop k = call_info->resolved_method()->method_holder();
    assert(Klass::cast(k)->is_interface(), "sanity check");
    InlineCacheBuffer::create_transition_stub(this, k, entry);
  } else {
    // Can differ from method->vtable_index(), due to package-private etc.
    int vtable_index = call_info->vtable_index();
    entry = VtableStubs::create_stub(true, vtable_index, method());
    InlineCacheBuffer::create_transition_stub(this, method(), entry);
  }

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
                   instruction_address(), method->print_value_string(), entry);
  }

  Events::log("compiledIC " INTPTR_FORMAT " --> megamorphic " INTPTR_FORMAT, this, (address)method());
  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_megamorphic(), "sanity check");
}
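
// Hedged summary of the two megamorphic shapes built above (a reading of
// set_to_megamorphic, not an authoritative spec):
//
//   invokeinterface (no vtable index):
//     cached oop  = interface klassOop
//     destination = itable stub, VtableStubs::create_stub(false, index, ...)
//   virtual dispatch:
//     cached oop  = methodOop
//     destination = vtable stub, VtableStubs::create_stub(true, vtable_index, ...)
//
// Both are installed through a transition stub so the two-word update appears
// atomic to racing threads.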


// true if destination is megamorphic stub
bool CompiledIC::is_megamorphic() const {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  assert(!is_optimized(), "an optimized call cannot be megamorphic");

  // Cannot rely on cached_oop. It is either an interface or a method.
  return VtableStubs::is_entry_point(ic_destination());
}

bool CompiledIC::is_call_to_compiled() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");

  // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
  // method is guaranteed to still exist, since we only remove methods after all inline caches
  // have been cleaned up.
  CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
  bool is_monomorphic = (cb != NULL && cb->is_nmethod());
  // Check that the cached_oop is a klass for non-optimized monomorphic calls.
  // This assertion is invalid for compiler1: a call that does not look optimized (no static stub)
  // can be used for calling directly to the vep without using the inline cache (i.e., cached_oop == NULL).
#ifdef ASSERT
#ifdef TIERED
  CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
  bool is_c1_method = caller->is_compiled_by_c1();
#else
#ifdef COMPILER1
  bool is_c1_method = true;
#else
  bool is_c1_method = false;
#endif // COMPILER1
#endif // TIERED
  assert(is_c1_method ||
         !is_monomorphic ||
         is_optimized() ||
         (cached_oop() != NULL && cached_oop()->is_klass()), "sanity check");
#endif // ASSERT
  return is_monomorphic;
}


bool CompiledIC::is_call_to_interpreted() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  // The call is to the interpreter if the destination is either a stub (for an
  // optimized call) or an I2C adapter blob.
  bool is_call_to_interpreted = false;
  if (!is_optimized()) {
    // Must use the unsafe lookup, because the destination can be a zombie (and we're cleaning),
    // and the print_compiled_ic code wants to know if the site (in the non-zombie method)
    // calls to the interpreter.
    CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
    is_call_to_interpreted = (cb != NULL && cb->is_adapter_blob());
    assert(!is_call_to_interpreted || (cached_oop() != NULL && cached_oop()->is_compiledICHolder()), "sanity check");
  } else {
    // Check if we are calling into our own codeblob (i.e., to a stub)
    CodeBlob* cb = CodeCache::find_blob(_ic_call->instruction_address());
    address dest = ic_destination();
#ifdef ASSERT
    {
      CodeBlob* db = CodeCache::find_blob_unsafe(dest);
      assert(!db->is_adapter_blob(), "must use stub!");
    }
#endif /* ASSERT */
    is_call_to_interpreted = cb->contains(dest);
  }
  return is_call_to_interpreted;
}


void CompiledIC::set_to_clean() {
  assert(SafepointSynchronize::is_at_safepoint() || CompiledIC_lock->is_locked(), "MT-unsafe call");
  if (TraceInlineCacheClearing || TraceICs) {
    tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", instruction_address());
    print();
  }

  address entry;
  if (is_optimized()) {
    entry = SharedRuntime::get_resolve_opt_virtual_call_stub();
  } else {
    entry = SharedRuntime::get_resolve_virtual_call_stub();
  }

  // A zombie transition will always be safe, since the oop has already been set to NULL, so
  // we only need to patch the destination
  bool safe_transition = is_optimized() || SafepointSynchronize::is_at_safepoint();

  if (safe_transition) {
    if (!is_optimized()) set_cached_oop(NULL);
    // Kill any leftover stub we might have too
    if (is_in_transition_state()) {
      ICStub* old_stub = ICStub_from_destination_address(stub_address());
      old_stub->clear();
    }
    set_ic_destination(entry);
  } else {
    // Unsafe transition - create stub.
    InlineCacheBuffer::create_transition_stub(this, NULL, entry);
  }
  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_clean(), "sanity check");
}
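
// Why the safe_transition test above is sufficient (sketch, not normative):
// an optimized IC has no cached oop, so cleaning it only rewrites the call
// target, which set_destination_mt_safe() can do under racing readers; a
// non-optimized IC needs its (oop, destination) pair changed together, which
// is only atomic at a safepoint. Otherwise the update is funneled through an
// InlineCacheBuffer transition stub:
//
//   safe transition   : set_cached_oop(NULL); set_ic_destination(entry);
//   unsafe transition : InlineCacheBuffer::create_transition_stub(this, NULL, entry);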


bool CompiledIC::is_clean() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  bool is_clean = false;
  address dest = ic_destination();
  is_clean = dest == SharedRuntime::get_resolve_opt_virtual_call_stub() ||
             dest == SharedRuntime::get_resolve_virtual_call_stub();
  assert(!is_clean || is_optimized() || cached_oop() == NULL, "sanity check");
  return is_clean;
}


void CompiledIC::set_to_monomorphic(const CompiledICInfo& info) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  // Updating a cache to the wrong entry can cause bugs that are very hard
  // to track down - if a cache entry becomes invalid, we simply clean it. In
  // this way it is always the same code path that is responsible for
  // updating and resolving an inline cache.
  //
  // The above is no longer true. SharedRuntime::fixup_callers_callsite will change optimized
  // callsites. In addition, ic_miss code will update a site to monomorphic if it determines
  // that a monomorphic call to the interpreter can now be monomorphic to compiled code.
  //
  // In both of these cases the only thing being modified is the jump/call target, and these
  // transitions are mt_safe.

  Thread *thread = Thread::current();
  if (info._to_interpreter) {
    // Call to interpreter
    if (info.is_optimized() && is_optimized()) {
      assert(is_clean(), "unsafe IC path");
      MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
      // The call analysis (callee structure) specifies that the call is optimized
      // (either because of CHA or the static target is final).
      // At code generation time, this call has been emitted as a static call.
      // Call via stub.
      assert(info.cached_oop().not_null() && info.cached_oop()->is_method(), "sanity check");
      CompiledStaticCall* csc = compiledStaticCall_at(instruction_address());
      methodHandle method (thread, (methodOop)info.cached_oop()());
      csc->set_to_interpreted(method, info.entry());
      if (TraceICs) {
         ResourceMark rm(thread);
         tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to interpreter: %s",
           instruction_address(),
           method->print_value_string());
      }
    } else {
      // Call via method-klass-holder
      assert(info.cached_oop().not_null(), "must be set");
      InlineCacheBuffer::create_transition_stub(this, info.cached_oop()(), info.entry());

      if (TraceICs) {
         ResourceMark rm(thread);
         tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to interpreter via mkh", instruction_address());
      }
    }
  } else {
    // Call to compiled code
    bool static_bound = info.is_optimized() || (info.cached_oop().is_null());
#ifdef ASSERT
    CodeBlob* cb = CodeCache::find_blob_unsafe(info.entry());
    assert (cb->is_nmethod(), "must be compiled!");
#endif /* ASSERT */

    // This is MT safe if we come from a clean-cache and go through a
    // non-verified entry point
    bool safe = SafepointSynchronize::is_at_safepoint() ||
                (!is_in_transition_state() && (info.is_optimized() || static_bound || is_clean()));

    if (!safe) {
      InlineCacheBuffer::create_transition_stub(this, info.cached_oop()(), info.entry());
    } else {
      set_ic_destination(info.entry());
      if (!is_optimized()) set_cached_oop(info.cached_oop()());
    }

    if (TraceICs) {
      ResourceMark rm(thread);
      assert(info.cached_oop() == NULL || info.cached_oop()()->is_klass(), "must be");
      tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to compiled (rcvr klass) %s: %s",
        instruction_address(),
        ((klassOop)info.cached_oop()())->print_value_string(),
        (safe) ? "" : "via stub");
    }
  }
  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
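
// Quick reference for the four monomorphic shapes produced above (derived
// from this function; illustrative only):
//
//   to interpreter, optimized : static call stub patched to the c2i entry (methodOop)
//   to interpreter, virtual   : transition stub installs a compiledICHolder + c2i entry
//   to compiled, safe         : destination (and, if virtual, receiver klass oop) patched in place
//   to compiled, unsafe       : transition stub carries the oop and entry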


// is_optimized: the compiler has generated an optimized call (i.e., no inline
// cache); static_bound: the call can be statically bound (i.e., no need to
// use an inline cache).
void CompiledIC::compute_monomorphic_entry(methodHandle method,
                                           KlassHandle receiver_klass,
                                           bool is_optimized,
                                           bool static_bound,
                                           CompiledICInfo& info,
                                           TRAPS) {
  info._is_optimized = is_optimized;

  nmethod* method_code = method->code();
  address entry = NULL;
  if (method_code != NULL) {
    // Call to compiled code
    if (static_bound || is_optimized) {
      entry      = method_code->verified_entry_point();
    } else {
      entry      = method_code->entry_point();
    }
  }
  if (entry != NULL) {
    // Call to compiled code
    info._entry      = entry;
    if (static_bound || is_optimized) {
      info._cached_oop = Handle(THREAD, (oop)NULL);
    } else {
      info._cached_oop = receiver_klass;
    }
    info._to_interpreter = false;
  } else {
    // Note: the following problem exists with Compiler1:
    //   - at compile time we may or may not know if the destination is final
    //   - if we know that the destination is final, we will emit an optimized
    //     virtual call (no inline cache), and need a methodOop to make a call
    //     to the interpreter
    //   - if we do not know if the destination is final, we emit a standard
    //     virtual call, and use CompiledICHolder to call interpreted code
    //     (no static call stub has been generated).
    //     However, in that case we will now notice that it is static_bound
    //     and convert the call into what looks to be an optimized
    //     virtual call. This causes problems in verifying the IC because
    //     it looks vanilla but is optimized. Code in is_call_to_interpreted
    //     is aware of this and weakens its asserts.

    info._to_interpreter = true;
    // static_bound should imply is_optimized -- otherwise we have a
    // performance bug (a statically-bindable method is called via a
    // dynamically-dispatched call). Note: the reverse implication isn't
    // necessarily true -- the call may have been optimized based on compiler
    // analysis (static_bound is only based on "final" etc.)
#ifdef COMPILER2
#ifdef TIERED
#if defined(ASSERT)
    // can't check the assert because we don't have the CompiledIC with which to
    // find the address of the call instruction.
    //
    // CodeBlob* cb = find_blob_unsafe(instruction_address());
    // assert(cb->is_compiled_by_c1() || !static_bound || is_optimized, "static_bound should imply is_optimized");
#endif // ASSERT
#else
    assert(!static_bound || is_optimized, "static_bound should imply is_optimized");
#endif // TIERED
#endif // COMPILER2
    if (is_optimized) {
      // Use stub entry
      info._entry      = method()->get_c2i_entry();
      info._cached_oop = method;
    } else {
      // Use mkh entry
      oop holder = oopFactory::new_compiledICHolder(method, receiver_klass, CHECK);
      info._cached_oop = Handle(THREAD, holder);
      info._entry      = method()->get_c2i_unverified_entry();
    }
  }
}
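
// Sketch of the resulting CompiledICInfo per input combination (a reading of
// the code above; illustrative only):
//
//   has nmethod, optimized or static_bound : _entry = verified entry point,   _cached_oop = NULL
//   has nmethod, plain virtual             : _entry = unverified entry point, _cached_oop = receiver klass
//   interpreted, optimized                 : _entry = c2i entry,              _cached_oop = methodOop
//   interpreted, plain virtual             : _entry = c2i unverified entry,   _cached_oop = new compiledICHolder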


inline static RelocIterator parse_ic(nmethod* nm, address ic_call, oop* &_oop_addr, bool *is_optimized) {
   address  first_oop = NULL;
   // Mergers please note: Sun SC5.x CC insists on an lvalue for a reference parameter.
   nmethod* tmp_nm = nm;
   return virtual_call_Relocation::parse_ic(tmp_nm, ic_call, first_oop, _oop_addr, is_optimized);
}

CompiledIC::CompiledIC(NativeCall* ic_call)
  : _ic_call(ic_call),
    _oops(parse_ic(NULL, ic_call->instruction_address(), _oop_addr, &_is_optimized))
{
}


CompiledIC::CompiledIC(Relocation* ic_reloc)
  : _ic_call(nativeCall_at(ic_reloc->addr())),
    _oops(parse_ic(ic_reloc->code(), ic_reloc->addr(), _oop_addr, &_is_optimized))
{
  assert(ic_reloc->type() == relocInfo::virtual_call_type ||
         ic_reloc->type() == relocInfo::opt_virtual_call_type, "wrong reloc. info");
}

// ----------------------------------------------------------------------------

void CompiledStaticCall::set_to_clean() {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset call site
  MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
#ifdef ASSERT
  CodeBlob* cb = CodeCache::find_blob_unsafe(this);
  assert(cb != NULL && cb->is_nmethod(), "must be nmethod");
#endif
  set_destination_mt_safe(SharedRuntime::get_resolve_static_call_stub());

  // Do not reset stub here:  It is too expensive to call find_stub.
  // Instead, rely on caller (nmethod::clear_inline_caches) to clear
  // both the call and its stub.
}


bool CompiledStaticCall::is_clean() const {
  return destination() == SharedRuntime::get_resolve_static_call_stub();
}

bool CompiledStaticCall::is_call_to_compiled() const {
  return CodeCache::contains(destination());
}


bool CompiledStaticCall::is_call_to_interpreted() const {
  // It is a call to interpreted code if it calls to a stub. Hence, the destination
  // must be in the stub part of the nmethod that contains the call.
  nmethod* nm = CodeCache::find_nmethod(instruction_address());
  return nm->stub_contains(destination());
}


void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  assert(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  instruction_address(),
                  callee->name_and_sig_as_C_string());
  }

  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data()    == 0           || method_holder->data()    == (intptr_t)callee(), "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry, "b) MT-unsafe modification of inline cache");

  // Update stub
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call
  set_destination_mt_safe(stub);
}
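
// Stub layout assumed by the code above (sketch; the exact instruction
// sequence is platform-specific):
//
//   stub:  mov  <methodOop>, Rmethod   ; NativeMovConstReg, patched via set_data()
//          jmp  <entry>                ; NativeJump, patched via set_jump_destination()
//
// A cleared stub holds data 0 and jump target (address)-1 (see
// set_stub_to_clean below), which is exactly what the two MT-safety asserts
// above check before patching.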


void CompiledStaticCall::set(const StaticCallInfo& info) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
  // Updating a cache to the wrong entry can cause bugs that are very hard
  // to track down - if a cache entry becomes invalid, we simply clean it. In
  // this way it is always the same code path that is responsible for
  // updating and resolving an inline cache.
  assert(is_clean(), "do not update a call entry - use clean");

  if (info._to_interpreter) {
    // Call to interpreted code
    set_to_interpreted(info.callee(), info.entry());
  } else {
    if (TraceICs) {
      ResourceMark rm;
      tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_compiled " INTPTR_FORMAT,
                    instruction_address(),
                    info.entry());
    }
    // Call to compiled code
    assert (CodeCache::contains(info.entry()), "wrong entry point");
    set_destination_mt_safe(info.entry());
  }
}


// Compute settings for a CompiledStaticCall. Since we might have to set
// the stub when calling to the interpreter, we need to return arguments.
void CompiledStaticCall::compute_entry(methodHandle m, StaticCallInfo& info) {
  nmethod* m_code = m->code();
  info._callee = m;
  if (m_code != NULL) {
    info._to_interpreter = false;
    info._entry  = m_code->verified_entry_point();
  } else {
    // Callee is interpreted code.  In any case entering the interpreter
    // puts a converter-frame on the stack to save arguments.
    info._to_interpreter = true;
    info._entry      = m()->get_c2i_entry();
  }
}


void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}


address CompiledStaticCall::find_stub() {
  // Find reloc. information containing this call-site
  RelocIterator iter((nmethod*)NULL, instruction_address());
  while (iter.next()) {
    if (iter.addr() == instruction_address()) {
      switch(iter.type()) {
        case relocInfo::static_call_type:
          return iter.static_call_reloc()->static_stub();
        // We check here for opt_virtual_call_type, since we reuse the code
        // from the CompiledIC implementation
        case relocInfo::opt_virtual_call_type:
          return iter.opt_virtual_call_reloc()->static_stub();
        case relocInfo::poll_type:
        case relocInfo::poll_return_type: // A safepoint can't overlap a call.
        default:
          ShouldNotReachHere();
      }
    }
  }
  return NULL;
}
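
// Usage sketch for find_stub() (illustrative): a caller walks from the call
// site to its stub and then patches the stub's two parts directly, e.g.
//
//   address stub = find_stub();
//   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
//   NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
//
// which is the pattern used by set_to_interpreted() and verify() in this file.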


//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledIC::verify() {
  // make sure code pattern is actually a call imm32 instruction
  _ic_call->verify();
  if (os::is_MP()) {
    _ic_call->verify_alignment();
  }
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted()
          || is_optimized() || is_megamorphic(), "sanity check");
}


void CompiledIC::print() {
  print_compiled_ic();
  tty->cr();
}


void CompiledIC::print_compiled_ic() {
  tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT,
             instruction_address(), is_call_to_interpreted() ? "interpreted " : "", ic_destination());
}


void CompiledStaticCall::print() {
  tty->print("static call at " INTPTR_FORMAT " -> ", instruction_address());
  if (is_clean()) {
    tty->print("clean");
  } else if (is_call_to_compiled()) {
    tty->print("compiled");
  } else if (is_call_to_interpreted()) {
    tty->print("interpreted");
  }
  tty->cr();
}

void CompiledStaticCall::verify() {
  // Verify call
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif