c1_Compiler.cpp revision 9248:6ab7e19c9220
1/*
2 * Copyright (c) 1999, 2015, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "c1/c1_Compilation.hpp"
27#include "c1/c1_Compiler.hpp"
28#include "c1/c1_FrameMap.hpp"
29#include "c1/c1_GraphBuilder.hpp"
30#include "c1/c1_LinearScan.hpp"
31#include "c1/c1_MacroAssembler.hpp"
32#include "c1/c1_Runtime1.hpp"
33#include "c1/c1_ValueType.hpp"
34#include "compiler/compileBroker.hpp"
35#include "interpreter/linkResolver.hpp"
36#include "memory/allocation.hpp"
37#include "memory/allocation.inline.hpp"
38#include "memory/resourceArea.hpp"
39#include "prims/nativeLookup.hpp"
40#include "runtime/arguments.hpp"
41#include "runtime/interfaceSupport.hpp"
42#include "runtime/sharedRuntime.hpp"
43
44
45Compiler::Compiler() : AbstractCompiler(c1) {
46}
47
// One-time global initialization of the C1 runtime, performed by the single
// thread for which should_perform_init() returns true (see initialize()).
// The initialization order below is deliberate; do not reorder.
void Compiler::init_c1_runtime() {
  BufferBlob* buffer_blob = CompilerThread::current()->get_buffer_blob();
  // Arena holding compiler-global data (value types and intervals below);
  // it is never freed here — presumably lives for the VM's lifetime.
  Arena* arena = new (mtCompiler) Arena(mtCompiler);
  Runtime1::initialize(buffer_blob);
  FrameMap::initialize();
  // initialize data structures
  ValueType::initialize(arena);
  GraphBuilder::initialize();
  // note: to use more than one instance of LinearScan at a time this function call has to
  //       be moved somewhere outside of this method:
  Interval::initialize(arena);
}
60
61
62void Compiler::initialize() {
63  // Buffer blob must be allocated per C1 compiler thread at startup
64  BufferBlob* buffer_blob = init_buffer_blob();
65
66  if (should_perform_init()) {
67    if (buffer_blob == NULL) {
68      // When we come here we are in state 'initializing'; entire C1 compilation
69      // can be shut down.
70      set_state(failed);
71    } else {
72      init_c1_runtime();
73      set_state(initialized);
74    }
75  }
76}
77
78int Compiler::code_buffer_size() {
79  assert(SegmentedCodeCache, "Should be only used with a segmented code cache");
80  return Compilation::desired_max_code_buffer_size() + Compilation::desired_max_constant_size();
81}
82
83BufferBlob* Compiler::init_buffer_blob() {
84  // Allocate buffer blob once at startup since allocation for each
85  // compilation seems to be too expensive (at least on Intel win32).
86  assert (CompilerThread::current()->get_buffer_blob() == NULL, "Should initialize only once");
87
88  // setup CodeBuffer.  Preallocate a BufferBlob of size
89  // NMethodSizeLimit plus some extra space for constants.
90  int code_buffer_size = Compilation::desired_max_code_buffer_size() +
91    Compilation::desired_max_constant_size();
92
93  BufferBlob* buffer_blob = BufferBlob::create("C1 temporary CodeBuffer", code_buffer_size);
94  if (buffer_blob != NULL) {
95    CompilerThread::current()->set_buffer_blob(buffer_blob);
96  }
97
98  return buffer_blob;
99}
100
// Returns true iff C1 can intrinsify the given method. A method not on the
// explicit list below, or whose hardware prerequisites are not met, falls
// back to regular compilation of its bytecodes.
bool Compiler::is_intrinsic_supported(const methodHandle& method) {
  vmIntrinsics::ID id = method->intrinsic_id();
  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");

  if (method->is_synchronized()) {
    // C1 does not support intrinsification of synchronized methods.
    return false;
  }

  switch (id) {
  // The atomic intrinsics below are supported only when the CPU provides
  // the matching atomic primitive (8-byte CAS, get-and-add, get-and-set).
  case vmIntrinsics::_compareAndSwapLong:
    if (!VM_Version::supports_cx8()) return false;
    break;
  case vmIntrinsics::_getAndAddInt:
    if (!VM_Version::supports_atomic_getadd4()) return false;
    break;
  case vmIntrinsics::_getAndAddLong:
    if (!VM_Version::supports_atomic_getadd8()) return false;
    break;
  case vmIntrinsics::_getAndSetInt:
    if (!VM_Version::supports_atomic_getset4()) return false;
    break;
  case vmIntrinsics::_getAndSetLong:
    if (!VM_Version::supports_atomic_getset8()) return false;
    break;
  case vmIntrinsics::_getAndSetObject:
#ifdef _LP64
    // On 64-bit, the oop width (and thus the required atomic width)
    // depends on whether compressed oops are in use.
    if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) return false;
    if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) return false;
#else
    if (!VM_Version::supports_atomic_getset4()) return false;
#endif
    break;
  // Everything from here to the end of the switch is unconditionally
  // supported by C1.
  case vmIntrinsics::_arraycopy:
  case vmIntrinsics::_currentTimeMillis:
  case vmIntrinsics::_nanoTime:
  case vmIntrinsics::_Reference_get:
    // Use the intrinsic version of Reference.get() so that the value in
    // the referent field can be registered by the G1 pre-barrier code.
    // Also to prevent commoning reads from this field across safepoint
    // since GC can change its value.
  case vmIntrinsics::_loadFence:
  case vmIntrinsics::_storeFence:
  case vmIntrinsics::_fullFence:
  case vmIntrinsics::_floatToRawIntBits:
  case vmIntrinsics::_intBitsToFloat:
  case vmIntrinsics::_doubleToRawLongBits:
  case vmIntrinsics::_longBitsToDouble:
  case vmIntrinsics::_getClass:
  case vmIntrinsics::_isInstance:
  case vmIntrinsics::_currentThread:
  case vmIntrinsics::_dabs:
  case vmIntrinsics::_dsqrt:
  case vmIntrinsics::_dsin:
  case vmIntrinsics::_dcos:
  case vmIntrinsics::_dtan:
  case vmIntrinsics::_dlog:
  case vmIntrinsics::_dlog10:
  case vmIntrinsics::_dexp:
  case vmIntrinsics::_dpow:
  case vmIntrinsics::_getObject:
  case vmIntrinsics::_getBoolean:
  case vmIntrinsics::_getByte:
  case vmIntrinsics::_getShort:
  case vmIntrinsics::_getChar:
  case vmIntrinsics::_getInt:
  case vmIntrinsics::_getLong:
  case vmIntrinsics::_getFloat:
  case vmIntrinsics::_getDouble:
  case vmIntrinsics::_putObject:
  case vmIntrinsics::_putBoolean:
  case vmIntrinsics::_putByte:
  case vmIntrinsics::_putShort:
  case vmIntrinsics::_putChar:
  case vmIntrinsics::_putInt:
  case vmIntrinsics::_putLong:
  case vmIntrinsics::_putFloat:
  case vmIntrinsics::_putDouble:
  case vmIntrinsics::_getObjectVolatile:
  case vmIntrinsics::_getBooleanVolatile:
  case vmIntrinsics::_getByteVolatile:
  case vmIntrinsics::_getShortVolatile:
  case vmIntrinsics::_getCharVolatile:
  case vmIntrinsics::_getIntVolatile:
  case vmIntrinsics::_getLongVolatile:
  case vmIntrinsics::_getFloatVolatile:
  case vmIntrinsics::_getDoubleVolatile:
  case vmIntrinsics::_putObjectVolatile:
  case vmIntrinsics::_putBooleanVolatile:
  case vmIntrinsics::_putByteVolatile:
  case vmIntrinsics::_putShortVolatile:
  case vmIntrinsics::_putCharVolatile:
  case vmIntrinsics::_putIntVolatile:
  case vmIntrinsics::_putLongVolatile:
  case vmIntrinsics::_putFloatVolatile:
  case vmIntrinsics::_putDoubleVolatile:
  case vmIntrinsics::_getByte_raw:
  case vmIntrinsics::_getShort_raw:
  case vmIntrinsics::_getChar_raw:
  case vmIntrinsics::_getInt_raw:
  case vmIntrinsics::_getLong_raw:
  case vmIntrinsics::_getFloat_raw:
  case vmIntrinsics::_getDouble_raw:
  case vmIntrinsics::_putByte_raw:
  case vmIntrinsics::_putShort_raw:
  case vmIntrinsics::_putChar_raw:
  case vmIntrinsics::_putInt_raw:
  case vmIntrinsics::_putLong_raw:
  case vmIntrinsics::_putFloat_raw:
  case vmIntrinsics::_putDouble_raw:
  case vmIntrinsics::_putOrderedObject:
  case vmIntrinsics::_putOrderedInt:
  case vmIntrinsics::_putOrderedLong:
  case vmIntrinsics::_getShortUnaligned:
  case vmIntrinsics::_getCharUnaligned:
  case vmIntrinsics::_getIntUnaligned:
  case vmIntrinsics::_getLongUnaligned:
  case vmIntrinsics::_putShortUnaligned:
  case vmIntrinsics::_putCharUnaligned:
  case vmIntrinsics::_putIntUnaligned:
  case vmIntrinsics::_putLongUnaligned:
  case vmIntrinsics::_checkIndex:
  case vmIntrinsics::_updateCRC32:
  case vmIntrinsics::_updateBytesCRC32:
  case vmIntrinsics::_updateByteBufferCRC32:
  case vmIntrinsics::_compareAndSwapInt:
  case vmIntrinsics::_compareAndSwapObject:
#ifdef TRACE_HAVE_INTRINSICS
  // Event-tracing intrinsics, only present when tracing support is built in.
  case vmIntrinsics::_classID:
  case vmIntrinsics::_threadID:
  case vmIntrinsics::_counterTime:
#endif
    break;
  default:
    return false; // Intrinsics not on the previous list are not available.
  }

  return true;
}
240
241void Compiler::compile_method(ciEnv* env, ciMethod* method, int entry_bci) {
242  BufferBlob* buffer_blob = CompilerThread::current()->get_buffer_blob();
243  assert(buffer_blob != NULL, "Must exist");
244  // invoke compilation
245  {
246    // We are nested here because we need for the destructor
247    // of Compilation to occur before we release the any
248    // competing compiler thread
249    ResourceMark rm;
250    Compilation c(this, env, method, entry_bci, buffer_blob);
251  }
252}
253
254
// Print C1 compilation phase timing statistics; delegates to Compilation.
void Compiler::print_timers() {
  Compilation::print_timers();
}
258