/*
 * Copyright (c) 2008, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/assembler.hpp"
#include "assembler_arm.inline.hpp"
#include "code/vtableStubs.hpp"
#include "interp_masm_arm.hpp"
#include "memory/resourceArea.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/klassVtable.hpp"
#include "runtime/sharedRuntime.hpp"
#include "vmreg_arm.inline.hpp"
#ifdef COMPILER2
#include "opto/runtime.hpp"
#endif

// machine-dependent part of VtableStubs: create VtableStub of correct size and
// initialize its code
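//
// A rough sketch of the code emitted by create_vtable_stub() below (ARM32
// flavor; on AArch64 the final load into PC is replaced by a load into a
// scratch register followed by an indirect branch):
//
//   npe_addr: load_klass  tmp, R0                 ; receiver klass, implicit null check
//             (add        tmp, tmp, #offset_hi)   ; only if the vtable slot offset is large
//             ldr         Rmethod, [tmp, #offset_lo]
//   ame_addr: ldr         PC, [Rmethod, #from_compiled_offset]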

#define __ masm->

#ifndef PRODUCT
extern "C" void bad_compiled_vtable_index(JavaThread* thread, oop receiver, int index);
#endif

VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
  const int code_length = VtableStub::pd_code_size_limit(true);
  VtableStub* s = new(code_length) VtableStub(true, vtable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

  assert(VtableStub::receiver_location() == R0->as_VMReg(), "receiver expected in R0");

  const Register tmp = Rtemp; // Rtemp OK, should be free at call sites

  address npe_addr = __ pc();
  __ load_klass(tmp, R0);

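  // The byte offset of the selected vtable slot is a compile-time constant of
  // the stub. If it does not fit in the immediate field of a single load
  // (12 bits, scaled by the word size on AArch64), the high part is folded
  // into tmp with an add and only the low part is used in the load.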
  {
    int entry_offset = in_bytes(Klass::vtable_start_offset()) + vtable_index * vtableEntry::size_in_bytes();
    int method_offset = vtableEntry::method_offset_in_bytes() + entry_offset;

    assert((method_offset & (wordSize - 1)) == 0, "offset should be aligned");
    int offset_mask = AARCH64_ONLY(0xfff << LogBytesPerWord) NOT_AARCH64(0xfff);
    if (method_offset & ~offset_mask) {
      __ add(tmp, tmp, method_offset & ~offset_mask);
    }
    __ ldr(Rmethod, Address(tmp, method_offset & offset_mask));
  }

  address ame_addr = __ pc();
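  // Rmethod now holds the Method* taken from the vtable slot; jump to its
  // from_compiled entry point. ame_addr is registered below via
  // set_exception_points(), so a fault at this pc (e.g. no usable Method* in
  // the slot) is reported as an AbstractMethodError, just as a fault at
  // npe_addr is reported as a NullPointerException for a null receiver.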
#ifdef AARCH64
  __ ldr(tmp, Address(Rmethod, Method::from_compiled_offset()));
  __ br(tmp);
#else
  __ ldr(PC, Address(Rmethod, Method::from_compiled_offset()));
#endif // AARCH64

  masm->flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("vtable #%d at " PTR_FORMAT "[%d] left over: %d",
                  vtable_index, p2i(s->entry_point()),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");
  // FIXME ARM: need correct 'slop' - below is x86 code
  // shut the door on sizing bugs
  //int slop = 8;  // 32-bit offset is this much larger than a 13-bit one
  //assert(vtable_index > 10 || __ pc() + slop <= s->code_end(), "room for 32-bit offset");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}

VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  const int code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(code_length) VtableStub(false, itable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

  assert(VtableStub::receiver_location() == R0->as_VMReg(), "receiver expected in R0");

  // R0-R3 / R0-R7 registers hold the arguments and cannot be spoiled
  const Register Rclass  = AARCH64_ONLY(R9)  NOT_AARCH64(R4);
  const Register Rlength = AARCH64_ONLY(R10) NOT_AARCH64(R5);
  const Register Rscan   = AARCH64_ONLY(R11) NOT_AARCH64(R6);
  const Register tmp     = Rtemp;

  assert_different_registers(Ricklass, Rclass, Rlength, Rscan, tmp);

  // Calculate the start of itable (itable goes after vtable)
  const int scale = exact_log2(vtableEntry::size_in_bytes());
  address npe_addr = __ pc();
  __ load_klass(Rclass, R0);
  __ ldr_s32(Rlength, Address(Rclass, Klass::vtable_length_offset()));

  __ add(Rscan, Rclass, in_bytes(Klass::vtable_start_offset()));
  __ add(Rscan, Rscan, AsmOperand(Rlength, lsl, scale));

  // Search through the itable for an interface equal to incoming Ricklass
  // itable looks like [intface][offset][intface][offset][intface][offset]
  const int entry_size = itableOffsetEntry::size() * HeapWordSize;
  assert(itableOffsetEntry::interface_offset_in_bytes() == 0, "not added for convenience");
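  // Each itableOffsetEntry pairs an interface Klass* with the byte offset
  // (from the receiver klass) of that interface's method table. The scan below
  // stops either at an entry matching Ricklass or at a null interface entry,
  // which marks the end of the itable and means the interface is not
  // implemented by the receiver.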

  Label loop;
  __ bind(loop);
  __ ldr(tmp, Address(Rscan, entry_size, post_indexed));
#ifdef AARCH64
  Label found;
  __ cmp(tmp, Ricklass);
  __ b(found, eq);
  __ cbnz(tmp, loop);
#else
  __ cmp(tmp, Ricklass);  // set ZF and CF if interface is found
  __ cmn(tmp, 0, ne);     // check if tmp == 0 and clear CF if it is
  __ b(loop, ne);
#endif // AARCH64

  assert(StubRoutines::throw_IncompatibleClassChangeError_entry() != NULL, "Check initialization order");
#ifdef AARCH64
  __ jump(StubRoutines::throw_IncompatibleClassChangeError_entry(), relocInfo::runtime_call_type, tmp);
  __ bind(found);
#else
  // CF == 0 means we reached the end of itable without finding icklass
  __ jump(StubRoutines::throw_IncompatibleClassChangeError_entry(), relocInfo::runtime_call_type, noreg, cc);
#endif // !AARCH64

  // Interface found at previous position of Rscan, now load the Method*
  __ ldr_s32(tmp, Address(Rscan, itableOffsetEntry::offset_offset_in_bytes() - entry_size));
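  // tmp now holds the byte offset from the receiver klass to this interface's
  // method table, so the wanted entry lives at
  //   Rclass + tmp + itable_index * itableMethodEntry::size() * HeapWordSize + method_offset_in_bytes.
  // The constant part is added first (add_slow copes with values too large for
  // a single immediate), then tmp supplies the register offset in the load.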
  {
    const int method_offset = itableMethodEntry::size() * HeapWordSize * itable_index +
      itableMethodEntry::method_offset_in_bytes();
    __ add_slow(Rmethod, Rclass, method_offset);
  }
  __ ldr(Rmethod, Address(Rmethod, tmp));

  address ame_addr = __ pc();

#ifdef AARCH64
  __ ldr(tmp, Address(Rmethod, Method::from_compiled_offset()));
  __ br(tmp);
#else
  __ ldr(PC, Address(Rmethod, Method::from_compiled_offset()));
#endif // AARCH64

  masm->flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("itable #%d at " PTR_FORMAT "[%d] left over: %d",
                  itable_index, p2i(s->entry_point()),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");
  // FIXME ARM: need correct 'slop' - below is x86 code
  // shut the door on sizing bugs
  //int slop = 8;  // 32-bit offset is this much larger than a 13-bit one
  //assert(itable_index > 10 || __ pc() + slop <= s->code_end(), "room for 32-bit offset");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}

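// Upper bound, in bytes, on the code emitted above for a single stub. It is
// used as the CodeBuffer length in the creators and cross-checked by the
// guarantee() after each stub is assembled. On AArch64 extra instructions are
// reserved for load_klass when compressed class pointers must be decoded.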
int VtableStub::pd_code_size_limit(bool is_vtable_stub) {
  int instr_count;

  if (is_vtable_stub) {
    // vtable stub size
    instr_count = NOT_AARCH64(4) AARCH64_ONLY(5);
  } else {
    // itable stub size
    instr_count = NOT_AARCH64(20) AARCH64_ONLY(20);
  }

#ifdef AARCH64
  if (UseCompressedClassPointers) {
    instr_count += MacroAssembler::instr_count_for_decode_klass_not_null();
  }
#endif // AARCH64

  return instr_count * Assembler::InstructionSize;
}

int VtableStub::pd_code_alignment() {
  return 8;
}