vtableStubs_sparc.cpp revision 196:d1605aabd0a1
/*
 * Copyright 1997-2008 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

#include "incls/_precompiled.incl"
#include "incls/_vtableStubs_sparc.cpp.incl"

// machine-dependent part of VtableStubs: create vtableStub of correct size and
// initialize its code

#define __ masm->


#ifndef PRODUCT
extern "C" void bad_compiled_vtable_index(JavaThread* thread, oopDesc* receiver, int index);
#endif


// Used by compiler only; may use only caller saved, non-argument registers
// NOTE:  %%%% if any change is made to this stub make sure that the function
//             pd_code_size_limit is changed to ensure the correct size for VtableStub
VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
  const int sparc_code_length = VtableStub::pd_code_size_limit(true);
  VtableStub* s = new(sparc_code_length) VtableStub(true, vtable_index);
  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), sparc_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

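  // When CountCompiledCalls is on, bump SharedRuntime's megamorphic-call counter;
  // vtable stubs implement the megamorphic dispatch path for compiled virtual calls.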
#ifndef PRODUCT
  if (CountCompiledCalls) {
    Address ctr(G5, SharedRuntime::nof_megamorphic_calls_addr());
    __ sethi(ctr);
    __ ld(ctr, G3_scratch);
    __ inc(G3_scratch);
    __ st(G3_scratch, ctr);
  }
#endif /* PRODUCT */

  assert(VtableStub::receiver_location() == O0->as_VMReg(), "receiver expected in O0");

  // get receiver klass
  address npe_addr = __ pc();
  __ load_klass(O0, G3_scratch);
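  // The load above is also the implicit null check: npe_addr is registered via
  // set_exception_points() below, so a fault here on a NULL receiver is
  // translated into a NullPointerException.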

  // set methodOop (in case of interpreted method), and destination address
  int entry_offset = instanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
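  // entry_offset is in words: the vtable is embedded in the instanceKlass,
  // starting at vtable_start_offset(), with one vtableEntry per index.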
#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    // check offset vs vtable length
    __ ld(G3_scratch, instanceKlass::vtable_length_offset()*wordSize, G5);
    __ cmp(G5, vtable_index*vtableEntry::size());
    __ br(Assembler::greaterUnsigned, false, Assembler::pt, L);
    __ delayed()->nop();
    __ set(vtable_index, O2);
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), O0, O2);
    __ bind(L);
  }
#endif
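  // v_off is the byte offset from the klass to the selected vtable slot.
  // SPARC load/store displacements are signed 13-bit immediates (simm13),
  // so offsets beyond +/-4K must first be materialized in a register.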
  int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes();
  if( __ is_simm13(v_off) ) {
    __ ld_ptr(G3, v_off, G5_method);
  } else {
    __ set(v_off,G5);
    __ ld_ptr(G3, G5, G5_method);
  }

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    __ br_notnull(G5_method, false, Assembler::pt, L);
    __ delayed()->nop();
    __ stop("Vtable entry is ZERO");
    __ bind(L);
  }
#endif

  address ame_addr = __ pc();  // if the vtable entry is null, the method is abstract
                               // NOTE: for vtable dispatches, the vtable entry will never be null.

  __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch);

  // jump to target (either compiled code or c2iadapter)
  __ JMP(G3_scratch, 0);
  // load methodOop (in case we call c2iadapter)
  __ delayed()->nop();
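  // G5_method stays live across the jump: if from_compiled_offset() resolves to
  // the c2i adapter, the adapter expects the methodOop to arrive in G5.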

  masm->flush();
  s->set_exception_points(npe_addr, ame_addr);
  return s;
}

// NOTE:  %%%% if any change is made to this stub make sure that the function
//             pd_code_size_limit is changed to ensure the correct size for VtableStub
VtableStub* VtableStubs::create_itable_stub(int vtable_index) {
  const int sparc_code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(sparc_code_length) VtableStub(false, vtable_index);
  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), sparc_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

  Register G3_klassOop = G3_scratch;
  Register G5_interface = G5;  // Passed in as an argument
  Label search;

  // Entry arguments:
  //  G5_interface: Interface
  //  O0:           Receiver
  assert(VtableStub::receiver_location() == O0->as_VMReg(), "receiver expected in O0");

  // get receiver klass (also an implicit null-check)
  address npe_addr = __ pc();
  __ load_klass(O0, G3_klassOop);
  __ verify_oop(G3_klassOop);

  // Push a new window to get some temp registers.  This chops the head of all
  // my 64-bit %o registers in the LION build, but this is OK because no longs
  // are passed in the %o registers.  Instead, longs are passed in G1 and G4
  // and so those registers are not available here.
  __ save(SP,-frame::register_save_words*wordSize,SP);
  Register I0_receiver = I0;    // Location of receiver after save

#ifndef PRODUCT
  if (CountCompiledCalls) {
    Address ctr(L0, SharedRuntime::nof_megamorphic_calls_addr());
    __ sethi(ctr);
    __ ld(ctr, L1);
    __ inc(L1);
    __ st(L1, ctr);
  }
#endif /* PRODUCT */

  // load start of itable entries into L0 register
  const int base = instanceKlass::vtable_start_offset() * wordSize;
  __ ld(Address(G3_klassOop, 0, instanceKlass::vtable_length_offset() * wordSize), L0);

  // %%% Could store the aligned, prescaled offset in the klassoop.
  __ sll(L0, exact_log2(vtableEntry::size() * wordSize), L0);
  // see code for instanceKlass::start_of_itable!
  const int vtable_alignment = align_object_offset(1);
  assert(vtable_alignment == 1 || vtable_alignment == 2, "");
  const int odd_bit = vtableEntry::size() * wordSize;
  if (vtable_alignment == 2) {
    __ and3(L0, odd_bit, L1);   // isolate the odd bit
  }
  __ add(G3_klassOop, L0, L0);
  if (vtable_alignment == 2) {
    __ add(L0, L1, L0);         // double the odd bit, to align up
  }
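  // Together with the constant 'base' displacement used in the loads below,
  // L0 now addresses the first itableOffsetEntry: the vtable length was scaled
  // to bytes and, when the required alignment is two words, the isolated odd
  // word is added back in to round up (cf. instanceKlass::start_of_itable).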

  // Loop over all itable entries until desired interfaceOop (G5_interface) found
  __ bind(search);

  // %%%% Could load both offset and interface in one ldx, if they were
  // in the opposite order.  This would save a load.
  __ ld_ptr(L0, base + itableOffsetEntry::interface_offset_in_bytes(), L1);

  // If the entry is NULL then we've reached the end of the table
  // without finding the expected interface, so throw an exception
  Label throw_icce;
  __ bpr(Assembler::rc_z, false, Assembler::pn, L1, throw_icce);
  __ delayed()->cmp(G5_interface, L1);
  __ brx(Assembler::notEqual, true, Assembler::pn, search);
  __ delayed()->add(L0, itableOffsetEntry::size() * wordSize, L0);
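  // Each itableOffsetEntry pairs an interface klassOop with the byte offset of
  // that interface's block of itableMethodEntries.  The annulled delay slot of
  // the brx advances L0 to the next entry only when the loop branch is taken.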

  // entry found and L0 points to it, move offset of vtable for interface into L0
  __ ld(L0, base + itableOffsetEntry::offset_offset_in_bytes(), L0);

  // Compute itableMethodEntry and get methodOop(G5_method) and entrypoint(L0) for compiler
  const int method_offset = (itableMethodEntry::size() * wordSize * vtable_index) + itableMethodEntry::method_offset_in_bytes();
  __ add(G3_klassOop, L0, L1);
  __ ld_ptr(L1, method_offset, G5_method);
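  // The loaded offset is relative to the klass, so klass + offset (L1) is the
  // start of this interface's itableMethodEntry array; here vtable_index is the
  // method's index within that interface, selecting the slot with the methodOop.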

#ifndef PRODUCT
  if (DebugVtables) {
    Label L01;
    __ ld_ptr(L1, method_offset, G5_method);
    __ bpr(Assembler::rc_nz, false, Assembler::pt, G5_method, L01);
    __ delayed()->nop();
    __ stop("methodOop is null");
    __ bind(L01);
    __ verify_oop(G5_method);
  }
#endif

  // If the following load is through a NULL pointer, we'll take an OS
  // exception that should translate into an AbstractMethodError.  We need the
  // window count to be correct at that time.
  __ restore();                 // Restore registers BEFORE the AME point

  address ame_addr = __ pc();   // if the vtable entry is null, the method is abstract
  __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch);

  // G5_method:  methodOop
  // O0:         Receiver
  // G3_scratch: entry point
  __ JMP(G3_scratch, 0);
  __ delayed()->nop();

  __ bind(throw_icce);
  Address icce(G3_scratch, StubRoutines::throw_IncompatibleClassChangeError_entry());
  __ jump_to(icce, 0);
  __ delayed()->restore();
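  // The restore in the delay slot pops the register window pushed above, so the
  // IncompatibleClassChangeError stub runs with the original caller frame.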

  masm->flush();

  guarantee(__ pc() <= s->code_end(), "overflowed buffer");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}

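// Conservative upper bound on a stub's code size, used above to size its code
// buffer.  The debugging/tracing options can emit an unbounded amount of extra
// code, hence the flat 1000-byte limit when any of them is enabled.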
int VtableStub::pd_code_size_limit(bool is_vtable_stub) {
  if (TraceJumps || DebugVtables || CountCompiledCalls || VerifyOops) return 1000;
  else {
    const int slop = 2*BytesPerInstWord; // sethi;add  (needed for long offsets)
    if (is_vtable_stub) {
      // ld;ld;ld,jmp,nop
      const int basic = 5*BytesPerInstWord +
                        // shift;add for load_klass
                        (UseCompressedOops ? 2*BytesPerInstWord : 0);
      return basic + slop;
    } else {
      // save, ld, ld, sll, and, add, add, ld, cmp, br, add, ld, add, ld, ld, jmp, restore, sethi, jmpl, restore
      const int basic = (20 LP64_ONLY(+ 6)) * BytesPerInstWord +
                        // shift;add for load_klass
                        (UseCompressedOops ? 2*BytesPerInstWord : 0);
      return (basic + slop);
    }
  }
}


int VtableStub::pd_code_alignment() {
  // UltraSPARC cache line size is 8 instructions:
  const unsigned int icache_line_size = 32;
  return icache_line_size;
}