stubRoutines.hpp revision 9801:80f8be586fae
/*
 * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_RUNTIME_STUBROUTINES_HPP
#define SHARE_VM_RUNTIME_STUBROUTINES_HPP

#include "code/codeBlob.hpp"
#include "memory/allocation.hpp"
#include "runtime/frame.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/stubCodeGenerator.hpp"
#include "utilities/top.hpp"

// StubRoutines provides entry points to assembly routines used by
// compiled code and the run-time system. Platform-specific entry
// points are defined in the platform-specific inner class.
//
// Class scheme:
//
//    platform-independent               platform-dependent
//
//    stubRoutines.hpp  <-- included --  stubRoutines_<arch>.hpp
//           ^                                  ^
//           |                                  |
//       implements                         implements
//           |                                  |
//           |                                  |
//    stubRoutines.cpp                   stubRoutines_<arch>.cpp
//    stubRoutines_<os_family>.cpp       stubGenerator_<arch>.cpp
//    stubRoutines_<os_arch>.cpp
//
// Note 1: The important thing is a clean decoupling between stub
//         entry points (interfacing to the whole vm; i.e., 1-to-n
//         relationship) and stub generators (interfacing only to
//         the entry points implementation; i.e., 1-to-1 relationship).
//         This significantly simplifies changes in the generator
//         structure since the rest of the vm is not affected.
//
// Note 2: stubGenerator_<arch>.cpp contains a minimal amount of
//         machine-independent code, namely the calls to the generator
//         functions that are used platform-independently. However,
//         this comes with the advantage of a one-file implementation
//         of the generator, which should be fairly easy to change if
//         it ever becomes a problem.
//
// Scheme for adding a new entry point:
//
// 1. determine if it's a platform-dependent or platform-independent entry point
//    a) if platform-independent: make subsequent changes in the independent files
//    b) if platform-dependent:   make subsequent changes in the   dependent files
// 2. add a private static field holding the entry point address
// 3. add a public accessor function for that field
// 4. implement the corresponding generator function in the platform-dependent
//    stubGenerator_<arch>.cpp file and call it from generate_all() of that file
//    (see the sketch below)

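// Illustrative sketch only (the entry point "foo" and its generator are
// hypothetical, not part of this file); the pattern follows steps 2-4 above:
//
//    // stubRoutines.hpp                    // stubRoutines.cpp
//    static address _foo_entry;             address StubRoutines::_foo_entry = NULL;
//    static address foo_entry() { return _foo_entry; }
//
//    // stubGenerator_<arch>.cpp, typically wired up in generate_all():
//    StubRoutines::_foo_entry = generate_foo();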

class StubRoutines: AllStatic {

 public:
  enum platform_independent_constants {
    max_size_of_parameters = 256                           // max. parameter size supported by megamorphic lookups
  };

  // Dependencies
  friend class StubGenerator;
#if defined STUBROUTINES_MD_HPP
# include STUBROUTINES_MD_HPP
#elif defined TARGET_ARCH_MODEL_x86_32
# include "stubRoutines_x86_32.hpp"
#elif defined TARGET_ARCH_MODEL_x86_64
# include "stubRoutines_x86_64.hpp"
#elif defined TARGET_ARCH_MODEL_sparc
# include "stubRoutines_sparc.hpp"
#elif defined TARGET_ARCH_MODEL_zero
# include "stubRoutines_zero.hpp"
#elif defined TARGET_ARCH_MODEL_ppc_64
# include "stubRoutines_ppc_64.hpp"
#elif defined TARGET_ARCH_MODEL_aarch64
# include "stubRoutines_aarch64.hpp"
#endif

  static jint    _verify_oop_count;
  static address _verify_oop_subroutine_entry;

  static address _call_stub_return_address;                // the return PC, when returning to a call stub
  static address _call_stub_entry;
  static address _forward_exception_entry;
  static address _catch_exception_entry;
  static address _throw_AbstractMethodError_entry;
  static address _throw_IncompatibleClassChangeError_entry;
  static address _throw_NullPointerException_at_call_entry;
  static address _throw_StackOverflowError_entry;
  static address _throw_delayed_StackOverflowError_entry;
  static address _handler_for_unsafe_access_entry;

  static address _atomic_xchg_entry;
  static address _atomic_xchg_ptr_entry;
  static address _atomic_store_entry;
  static address _atomic_store_ptr_entry;
  static address _atomic_cmpxchg_entry;
  static address _atomic_cmpxchg_ptr_entry;
  static address _atomic_cmpxchg_byte_entry;
  static address _atomic_cmpxchg_long_entry;
  static address _atomic_add_entry;
  static address _atomic_add_ptr_entry;
  static address _fence_entry;
  static address _d2i_wrapper;
  static address _d2l_wrapper;

  static jint    _fpu_cntrl_wrd_std;
  static jint    _fpu_cntrl_wrd_24;
  static jint    _fpu_cntrl_wrd_64;
  static jint    _fpu_cntrl_wrd_trunc;
  static jint    _mxcsr_std;
  static jint    _fpu_subnormal_bias1[3];
  static jint    _fpu_subnormal_bias2[3];

  static BufferBlob* _code1;                               // code buffer for initial routines
  static BufferBlob* _code2;                               // code buffer for all other routines

  // Leaf routines which implement arraycopy and their addresses
  // arraycopy operands aligned on element type boundary
  static address _jbyte_arraycopy;
  static address _jshort_arraycopy;
  static address _jint_arraycopy;
  static address _jlong_arraycopy;
  static address _oop_arraycopy, _oop_arraycopy_uninit;
  static address _jbyte_disjoint_arraycopy;
  static address _jshort_disjoint_arraycopy;
  static address _jint_disjoint_arraycopy;
  static address _jlong_disjoint_arraycopy;
  static address _oop_disjoint_arraycopy, _oop_disjoint_arraycopy_uninit;

  // arraycopy operands aligned on the zero'th element boundary
  // These are identical to the ones aligned on an element type
  // boundary, except that they assume that both source and
  // destination are HeapWord aligned.
  static address _arrayof_jbyte_arraycopy;
  static address _arrayof_jshort_arraycopy;
  static address _arrayof_jint_arraycopy;
  static address _arrayof_jlong_arraycopy;
  static address _arrayof_oop_arraycopy, _arrayof_oop_arraycopy_uninit;
  static address _arrayof_jbyte_disjoint_arraycopy;
  static address _arrayof_jshort_disjoint_arraycopy;
  static address _arrayof_jint_disjoint_arraycopy;
  static address _arrayof_jlong_disjoint_arraycopy;
  static address _arrayof_oop_disjoint_arraycopy, _arrayof_oop_disjoint_arraycopy_uninit;

  // these are recommended but optional:
  static address _checkcast_arraycopy, _checkcast_arraycopy_uninit;
  static address _unsafe_arraycopy;
  static address _generic_arraycopy;

  static address _jbyte_fill;
  static address _jshort_fill;
  static address _jint_fill;
  static address _arrayof_jbyte_fill;
  static address _arrayof_jshort_fill;
  static address _arrayof_jint_fill;

  // zero heap space aligned to jlong (8 bytes)
  static address _zero_aligned_words;

  static address _aescrypt_encryptBlock;
  static address _aescrypt_decryptBlock;
  static address _cipherBlockChaining_encryptAESCrypt;
  static address _cipherBlockChaining_decryptAESCrypt;
  static address _ghash_processBlocks;

  static address _sha1_implCompress;
  static address _sha1_implCompressMB;
  static address _sha256_implCompress;
  static address _sha256_implCompressMB;
  static address _sha512_implCompress;
  static address _sha512_implCompressMB;

  static address _updateBytesCRC32;
  static address _crc_table_adr;

  static address _crc32c_table_addr;
  static address _updateBytesCRC32C;
  static address _updateBytesAdler32;

  static address _multiplyToLen;
  static address _squareToLen;
  static address _mulAdd;
  static address _montgomeryMultiply;
  static address _montgomerySquare;

  static address _vectorizedMismatch;

  static address _dexp;
  static address _dlog;

  // These are versions of the java.lang.Math methods which perform
  // the same operations as the intrinsic version.  They are used for
  // constant folding in the compiler to ensure equivalence.  If the
  // intrinsic version returns the same result as the strict version
  // then they can be set to the appropriate function from
  // SharedRuntime.
  static double (*_intrinsic_log10)(double);
  static double (*_intrinsic_pow)(double, double);
  static double (*_intrinsic_sin)(double);
  static double (*_intrinsic_cos)(double);
  static double (*_intrinsic_tan)(double);
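  // A sketch of how these might be wired up (the actual assignment, if any,
  // is platform-dependent and done by the stub generator; shown here for
  // illustration only):
  //   StubRoutines::_intrinsic_pow = SharedRuntime::dpow;  // only if bit-for-bit equal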

  // Safefetch stubs.
  static address _safefetch32_entry;
  static address _safefetch32_fault_pc;
  static address _safefetch32_continuation_pc;
  static address _safefetchN_entry;
  static address _safefetchN_fault_pc;
  static address _safefetchN_continuation_pc;

 public:
  // Initialization/Testing
  static void    initialize1();                            // must happen before universe::genesis
  static void    initialize2();                            // must happen after  universe::genesis

  static bool is_stub_code(address addr)                   { return contains(addr); }

  static bool contains(address addr) {
    return
      (_code1 != NULL && _code1->blob_contains(addr)) ||
      (_code2 != NULL && _code2->blob_contains(addr));
  }

  static CodeBlob* code1() { return _code1; }
  static CodeBlob* code2() { return _code2; }

  // Debugging
  static jint    verify_oop_count()                        { return _verify_oop_count; }
  static jint*   verify_oop_count_addr()                   { return &_verify_oop_count; }
  // a subroutine for debugging the GC
  static address verify_oop_subroutine_entry_address()     { return (address)&_verify_oop_subroutine_entry; }

  static address catch_exception_entry()                   { return _catch_exception_entry; }

  // Calls to Java
  typedef void (*CallStub)(
    address   link,
    intptr_t* result,
    BasicType result_type,
    Method*   method,
    address   entry_point,
    intptr_t* parameters,
    int       size_of_parameters,
    TRAPS
  );

  static CallStub call_stub()                              { return CAST_TO_FN_PTR(CallStub, _call_stub_entry); }
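  // A sketch of the invocation pattern (the real call site is
  // JavaCalls::call_helper in javaCalls.cpp; argument names here are
  // illustrative only):
  //
  //   StubRoutines::call_stub()(
  //     (address)&link,              // JavaCallWrapper for this call
  //     result_val_address,          // buffer receiving the callee's result
  //     result_type,                 // BasicType of that result
  //     method(),                    // Method* being invoked
  //     entry_point,                 // code entry to jump to
  //     args->parameters(),          // flattened Java arguments
  //     args->size_of_parameters(),  // number of parameter words
  //     CHECK);                      // TRAPS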

  // Exceptions
  static address forward_exception_entry()                 { return _forward_exception_entry; }
  // Implicit exceptions
  static address throw_AbstractMethodError_entry()         { return _throw_AbstractMethodError_entry; }
  static address throw_IncompatibleClassChangeError_entry(){ return _throw_IncompatibleClassChangeError_entry; }
  static address throw_NullPointerException_at_call_entry(){ return _throw_NullPointerException_at_call_entry; }
  static address throw_StackOverflowError_entry()          { return _throw_StackOverflowError_entry; }
  static address throw_delayed_StackOverflowError_entry()  { return _throw_delayed_StackOverflowError_entry; }

  // Exceptions during unsafe access - should throw Java exception rather
  // than crash.
  static address handler_for_unsafe_access()               { return _handler_for_unsafe_access_entry; }

  static address atomic_xchg_entry()                       { return _atomic_xchg_entry; }
  static address atomic_xchg_ptr_entry()                   { return _atomic_xchg_ptr_entry; }
  static address atomic_store_entry()                      { return _atomic_store_entry; }
  static address atomic_store_ptr_entry()                  { return _atomic_store_ptr_entry; }
  static address atomic_cmpxchg_entry()                    { return _atomic_cmpxchg_entry; }
  static address atomic_cmpxchg_ptr_entry()                { return _atomic_cmpxchg_ptr_entry; }
  static address atomic_cmpxchg_byte_entry()               { return _atomic_cmpxchg_byte_entry; }
  static address atomic_cmpxchg_long_entry()               { return _atomic_cmpxchg_long_entry; }
  static address atomic_add_entry()                        { return _atomic_add_entry; }
  static address atomic_add_ptr_entry()                    { return _atomic_add_ptr_entry; }
  static address fence_entry()                             { return _fence_entry; }

  static address d2i_wrapper()                             { return _d2i_wrapper; }
  static address d2l_wrapper()                             { return _d2l_wrapper; }
  static jint    fpu_cntrl_wrd_std()                       { return _fpu_cntrl_wrd_std; }
  static address addr_fpu_cntrl_wrd_std()                  { return (address)&_fpu_cntrl_wrd_std; }
  static address addr_fpu_cntrl_wrd_24()                   { return (address)&_fpu_cntrl_wrd_24; }
  static address addr_fpu_cntrl_wrd_64()                   { return (address)&_fpu_cntrl_wrd_64; }
  static address addr_fpu_cntrl_wrd_trunc()                { return (address)&_fpu_cntrl_wrd_trunc; }
  static address addr_mxcsr_std()                          { return (address)&_mxcsr_std; }
  static address addr_fpu_subnormal_bias1()                { return (address)&_fpu_subnormal_bias1; }
  static address addr_fpu_subnormal_bias2()                { return (address)&_fpu_subnormal_bias2; }


  static address select_arraycopy_function(BasicType t, bool aligned, bool disjoint, const char* &name, bool dest_uninitialized);
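  // Sketch of how a caller might pick a copy stub via the selector above
  // (purely illustrative):
  //   const char* name = NULL;
  //   address fn = StubRoutines::select_arraycopy_function(
  //                  T_INT, /*aligned*/ true, /*disjoint*/ false,
  //                  name, /*dest_uninitialized*/ false);
  //   // fn should now refer to one of the jint arraycopy entries below
  //   // (arrayof_jint_arraycopy for these flags), name to its label.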

  static address jbyte_arraycopy()  { return _jbyte_arraycopy; }
  static address jshort_arraycopy() { return _jshort_arraycopy; }
  static address jint_arraycopy()   { return _jint_arraycopy; }
  static address jlong_arraycopy()  { return _jlong_arraycopy; }
  static address oop_arraycopy(bool dest_uninitialized = false) {
    return dest_uninitialized ? _oop_arraycopy_uninit : _oop_arraycopy;
  }
  static address jbyte_disjoint_arraycopy()  { return _jbyte_disjoint_arraycopy; }
  static address jshort_disjoint_arraycopy() { return _jshort_disjoint_arraycopy; }
  static address jint_disjoint_arraycopy()   { return _jint_disjoint_arraycopy; }
  static address jlong_disjoint_arraycopy()  { return _jlong_disjoint_arraycopy; }
  static address oop_disjoint_arraycopy(bool dest_uninitialized = false) {
    return dest_uninitialized ? _oop_disjoint_arraycopy_uninit : _oop_disjoint_arraycopy;
  }

  static address arrayof_jbyte_arraycopy()  { return _arrayof_jbyte_arraycopy; }
  static address arrayof_jshort_arraycopy() { return _arrayof_jshort_arraycopy; }
  static address arrayof_jint_arraycopy()   { return _arrayof_jint_arraycopy; }
  static address arrayof_jlong_arraycopy()  { return _arrayof_jlong_arraycopy; }
  static address arrayof_oop_arraycopy(bool dest_uninitialized = false) {
    return dest_uninitialized ? _arrayof_oop_arraycopy_uninit : _arrayof_oop_arraycopy;
  }

  static address arrayof_jbyte_disjoint_arraycopy()  { return _arrayof_jbyte_disjoint_arraycopy; }
  static address arrayof_jshort_disjoint_arraycopy() { return _arrayof_jshort_disjoint_arraycopy; }
  static address arrayof_jint_disjoint_arraycopy()   { return _arrayof_jint_disjoint_arraycopy; }
  static address arrayof_jlong_disjoint_arraycopy()  { return _arrayof_jlong_disjoint_arraycopy; }
  static address arrayof_oop_disjoint_arraycopy(bool dest_uninitialized = false) {
    return dest_uninitialized ? _arrayof_oop_disjoint_arraycopy_uninit : _arrayof_oop_disjoint_arraycopy;
  }

  static address checkcast_arraycopy(bool dest_uninitialized = false) {
    return dest_uninitialized ? _checkcast_arraycopy_uninit : _checkcast_arraycopy;
  }
  static address unsafe_arraycopy()        { return _unsafe_arraycopy; }
  static address generic_arraycopy()       { return _generic_arraycopy; }

  static address jbyte_fill()          { return _jbyte_fill; }
  static address jshort_fill()         { return _jshort_fill; }
  static address jint_fill()           { return _jint_fill; }
  static address arrayof_jbyte_fill()  { return _arrayof_jbyte_fill; }
  static address arrayof_jshort_fill() { return _arrayof_jshort_fill; }
  static address arrayof_jint_fill()   { return _arrayof_jint_fill; }

  static address aescrypt_encryptBlock()                { return _aescrypt_encryptBlock; }
  static address aescrypt_decryptBlock()                { return _aescrypt_decryptBlock; }
  static address cipherBlockChaining_encryptAESCrypt()  { return _cipherBlockChaining_encryptAESCrypt; }
  static address cipherBlockChaining_decryptAESCrypt()  { return _cipherBlockChaining_decryptAESCrypt; }
  static address ghash_processBlocks() { return _ghash_processBlocks; }

  static address sha1_implCompress()     { return _sha1_implCompress; }
  static address sha1_implCompressMB()   { return _sha1_implCompressMB; }
  static address sha256_implCompress()   { return _sha256_implCompress; }
  static address sha256_implCompressMB() { return _sha256_implCompressMB; }
  static address sha512_implCompress()   { return _sha512_implCompress; }
  static address sha512_implCompressMB() { return _sha512_implCompressMB; }

  static address updateBytesCRC32()    { return _updateBytesCRC32; }
  static address crc_table_addr()      { return _crc_table_adr; }

  static address crc32c_table_addr()   { return _crc32c_table_addr; }
  static address updateBytesCRC32C()   { return _updateBytesCRC32C; }
  static address updateBytesAdler32()  { return _updateBytesAdler32; }

  static address multiplyToLen()       { return _multiplyToLen; }
  static address squareToLen()         { return _squareToLen; }
  static address mulAdd()              { return _mulAdd; }
  static address montgomeryMultiply()  { return _montgomeryMultiply; }
  static address montgomerySquare()    { return _montgomerySquare; }

  static address vectorizedMismatch()  { return _vectorizedMismatch; }

  static address dexp()                { return _dexp; }
  static address dlog()                { return _dlog; }

  static address select_fill_function(BasicType t, bool aligned, const char* &name);

  static address zero_aligned_words()   { return _zero_aligned_words; }

  static double  intrinsic_log10(double d) {
    assert(_intrinsic_log10 != NULL, "must be defined");
    return _intrinsic_log10(d);
  }
  static double  intrinsic_pow(double d, double d2) {
    assert(_intrinsic_pow != NULL, "must be defined");
    return _intrinsic_pow(d, d2);
  }
  static double  intrinsic_sin(double d) {
    assert(_intrinsic_sin != NULL, "must be defined");
    return _intrinsic_sin(d);
  }
  static double  intrinsic_cos(double d) {
    assert(_intrinsic_cos != NULL, "must be defined");
    return _intrinsic_cos(d);
  }
  static double  intrinsic_tan(double d) {
    assert(_intrinsic_tan != NULL, "must be defined");
    return _intrinsic_tan(d);
  }

  //
  // Safefetch stub support
  //

  typedef int      (*SafeFetch32Stub)(int*      adr, int      errValue);
  typedef intptr_t (*SafeFetchNStub) (intptr_t* adr, intptr_t errValue);

  static SafeFetch32Stub SafeFetch32_stub() { return CAST_TO_FN_PTR(SafeFetch32Stub, _safefetch32_entry); }
  static SafeFetchNStub  SafeFetchN_stub()  { return CAST_TO_FN_PTR(SafeFetchNStub,  _safefetchN_entry); }

  static bool is_safefetch_fault(address pc) {
    return pc != NULL &&
          (pc == _safefetch32_fault_pc ||
           pc == _safefetchN_fault_pc);
  }

  static address continuation_for_safefetch_fault(address pc) {
    assert(_safefetch32_continuation_pc != NULL &&
           _safefetchN_continuation_pc  != NULL,
           "not initialized");

    if (pc == _safefetch32_fault_pc) return _safefetch32_continuation_pc;
    if (pc == _safefetchN_fault_pc)  return _safefetchN_continuation_pc;

    ShouldNotReachHere();
    return NULL;
  }
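
  // Sketch of how a platform signal handler is expected to use the two
  // queries above (simplified; set_resume_pc is a placeholder for the
  // platform-specific way of redirecting the faulting context's pc):
  //
  //   if (StubRoutines::is_safefetch_fault(pc)) {
  //     set_resume_pc(uc, StubRoutines::continuation_for_safefetch_fault(pc));
  //     return true;   // handled: execution resumes after the faulting load
  //   }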

  //
  // Default versions of the above arraycopy functions for platforms which do
  // not have specialized versions
  //
  static void jbyte_copy     (jbyte*  src, jbyte*  dest, size_t count);
  static void jshort_copy    (jshort* src, jshort* dest, size_t count);
  static void jint_copy      (jint*   src, jint*   dest, size_t count);
  static void jlong_copy     (jlong*  src, jlong*  dest, size_t count);
  static void oop_copy       (oop*    src, oop*    dest, size_t count);
  static void oop_copy_uninit(oop*    src, oop*    dest, size_t count);

  static void arrayof_jbyte_copy     (HeapWord* src, HeapWord* dest, size_t count);
  static void arrayof_jshort_copy    (HeapWord* src, HeapWord* dest, size_t count);
  static void arrayof_jint_copy      (HeapWord* src, HeapWord* dest, size_t count);
  static void arrayof_jlong_copy     (HeapWord* src, HeapWord* dest, size_t count);
  static void arrayof_oop_copy       (HeapWord* src, HeapWord* dest, size_t count);
  static void arrayof_oop_copy_uninit(HeapWord* src, HeapWord* dest, size_t count);
};

// Safefetch allows loading a value from a location that is not known
// to be valid. If the load causes a fault, the error value is returned.
inline int SafeFetch32(int* adr, int errValue) {
  assert(StubRoutines::SafeFetch32_stub(), "stub not yet generated");
  return StubRoutines::SafeFetch32_stub()(adr, errValue);
}
inline intptr_t SafeFetchN(intptr_t* adr, intptr_t errValue) {
  assert(StubRoutines::SafeFetchN_stub(), "stub not yet generated");
  return StubRoutines::SafeFetchN_stub()(adr, errValue);
}


// returns true if SafeFetch32 and SafeFetchN can be used safely (stubroutines are already generated)
inline bool CanUseSafeFetch32() {
  return StubRoutines::SafeFetch32_stub() ? true : false;
}

inline bool CanUseSafeFetchN() {
  return StubRoutines::SafeFetchN_stub() ? true : false;
}
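
// Example use of the inline wrappers above (a sketch; 'addr' is an arbitrary,
// possibly unmapped address supplied by the caller):
//
//   if (CanUseSafeFetchN()) {
//     intptr_t v = SafeFetchN((intptr_t*)addr, /*errValue*/ -1);
//     if (v != -1) {
//       // the load succeeded and v holds the fetched word
//     }
//   }
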
#endif // SHARE_VM_RUNTIME_STUBROUTINES_HPP