/*
 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"

// ----------------------------------------------------------------------------

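// The '__' shorthand routes the pseudo-assembly statements below through the
// local MacroAssembler instance, so '__ jmp(rax)' expands to '_masm.jmp(rax)'.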
#define __ _masm.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // movq rbx, 0
  // jmp -5 # to self

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed.
  }
  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark, false /* is_aot */), Assembler::imm_operand);
  // Static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rbx, (Metadata*) NULL);  // Method is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ jump(RuntimeAddress(__ pc()));
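  // pc() is evaluated before the jump is emitted, so the branch targets its
  // own first byte (the "jmp -5 # to self" sketched above), which the
  // relocation/nativeinst code recognizes as the unresolved state.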

  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
  return base;
}
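
// Once resolved by set_to_interpreted() below, the stub is patched in place
// and reads roughly (a sketch; exact encodings vary):
//   movq rbx, <Method*>       ; metadata operand filled in
//   jmp  <interpreter entry>  ; jump destination filled in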
#undef __

int CompiledStaticCall::to_interp_stub_size() {
  return NOT_LP64(10)    // movl; jmp
         LP64_ONLY(15);  // movq (1+1+8); jmp (1+4)
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
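  // The three stub-side entries are the static_stub relocation, the metadata
  // relocation from mov_metadata(), and the runtime_call relocation from the
  // jump in emit_to_interp_stub().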
  return 4; // 3 in emit_to_interp_stub + 1 in emit_call
}

#if INCLUDE_AOT
#define __ _masm.
void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  if (!UseAOT) {
    return;
  }
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling aot code.
  // movq rax, imm64_aot_code_address
  // jmp  rax

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_aot_stub_size());
  guarantee(base != NULL, "out of space");
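  // Unlike emit_to_interp_stub(), which returns NULL to its caller on
  // failure, running out of stub space here is fatal.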

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark, true /* is_aot */), Assembler::imm_operand);
  // Load destination AOT code address.
#ifdef _LP64
  __ mov64(rax, CONST64(0));  // address is zapped till fixup time.
#else
  __ movl(rax, 0);  // address is zapped till fixup time.
#endif
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ jmp(rax);
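  // The register-indirect jmp encodes in 2 bytes, which is why
  // to_aot_stub_size() below is smaller than to_interp_stub_size().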

  assert(__ pc() - base <= to_aot_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
#undef __

int CompiledStaticCall::to_aot_stub_size() {
  if (UseAOT) {
    return NOT_LP64(7)    // movl; jmp
           LP64_ONLY(12);  // movq (1+1+8); jmp (2)
  } else {
    return 0;
  }
}

// Relocation entries for call stub, compiled java to aot.
int CompiledStaticCall::reloc_to_aot_stub() {
  if (UseAOT) {
    return 2; // 1 in emit_to_aot_stub + 1 in emit_call
  } else {
    return 0;
  }
}
#endif // INCLUDE_AOT

void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(false /* is_aot */);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
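  // These wrappers map directly onto the stub layout emitted by
  // emit_to_interp_stub(): a Method* load followed by a jump.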

#ifdef ASSERT
  // Read the values once; another thread may be patching them concurrently.
  intptr_t data = method_holder->data();
  address destination = jump->jump_destination();
  assert(data == 0 || data == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(destination == (address)-1 || destination == entry,
         "b) MT-unsafe modification of inline cache");
#endif

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Redirect the call site to the stub.
  set_destination_mt_safe(stub);
}

void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  method_holder->set_data(0);
  if (!static_stub->is_aot()) {
    NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
    jump->set_jump_destination((address)-1);
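    // (address)-1 marks the jump as unresolved; together with the zero
    // Method*, this restores the clean state that the asserts in
    // set_to_interpreted() expect.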
  }
}


//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  if (os::is_MP()) {
    _call->verify_alignment();
  }

#ifdef ASSERT
  CodeBlob* cb = CodeCache::find_blob_unsafe((address) _call);
  assert(cb && !cb->is_aot(), "CompiledDirectStaticCall cannot be used on AOTCompiledMethod");
#endif

  // Verify stub.
  address stub = find_stub(false /* is_aot */);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
#endif // !PRODUCT