MonitorSnippets.java revision 13264:48566d838608
/*
 * Copyright (c) 2012, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.hotspot.replacements;

import static jdk.vm.ci.code.MemoryBarriers.LOAD_STORE;
import static jdk.vm.ci.code.MemoryBarriers.STORE_STORE;
import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_VMCONFIG;
import static org.graalvm.compiler.hotspot.nodes.BeginLockScopeNode.beginLockScope;
import static org.graalvm.compiler.hotspot.nodes.EndLockScopeNode.endLockScope;
import static org.graalvm.compiler.hotspot.nodes.VMErrorNode.vmError;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.DISPLACED_MARK_WORD_LOCATION;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.MARK_WORD_LOCATION;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_CXQ_LOCATION;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_ENTRY_LIST_LOCATION;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_OWNER_LOCATION;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_RECURSION_LOCATION;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.PROTOTYPE_MARK_WORD_LOCATION;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.ageMaskInPlace;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockMaskInPlace;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockPattern;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.config;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.epochMaskInPlace;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.loadWordFromObject;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.lockDisplacedMarkOffset;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.markOffset;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.monitorMask;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorCxqOffset;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorEntryListOffset;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorOwnerOffset;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorRecursionsOffset;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.pageSize;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.prototypeMarkWordOffset;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.registerAsWord;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.unlockedMask;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.useBiasedLocking;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.verifyOop;
import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.wordSize;
import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.ProfileMonitors;
import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.SimpleFastInflatedLocking;
import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsMethodFilter;
import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsTypeFilter;
import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.VerifyBalancedMonitors;
import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FAST_PATH_PROBABILITY;
import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FREQUENT_PROBABILITY;
import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_FREQUENT_PROBABILITY;
import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_LIKELY_PROBABILITY;
import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.SLOW_PATH_PROBABILITY;
import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_FAST_PATH_PROBABILITY;
import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.probability;
import static org.graalvm.compiler.replacements.SnippetTemplate.DEFAULT_REPLACER;

import java.util.List;

import org.graalvm.compiler.api.replacements.Fold;
import org.graalvm.compiler.api.replacements.Snippet;
import org.graalvm.compiler.api.replacements.Snippet.ConstantParameter;
import org.graalvm.compiler.bytecode.Bytecode;
import org.graalvm.compiler.bytecode.ResolvedJavaMethodBytecode;
import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor;
import org.graalvm.compiler.core.common.type.ObjectStamp;
import org.graalvm.compiler.core.common.type.StampFactory;
import org.graalvm.compiler.core.common.type.StampPair;
import org.graalvm.compiler.debug.DebugHandlersFactory;
import org.graalvm.compiler.graph.Node.ConstantNodeParameter;
import org.graalvm.compiler.graph.Node.NodeIntrinsic;
import org.graalvm.compiler.graph.iterators.NodeIterable;
import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
import org.graalvm.compiler.hotspot.meta.HotSpotRegistersProvider;
import org.graalvm.compiler.hotspot.nodes.AcquiredCASLockNode;
import org.graalvm.compiler.hotspot.nodes.CurrentLockNode;
import org.graalvm.compiler.hotspot.nodes.FastAcquireBiasedLockNode;
import org.graalvm.compiler.hotspot.nodes.MonitorCounterNode;
import org.graalvm.compiler.hotspot.word.KlassPointer;
import org.graalvm.compiler.nodes.BreakpointNode;
import org.graalvm.compiler.nodes.CallTargetNode.InvokeKind;
import org.graalvm.compiler.nodes.ConstantNode;
import org.graalvm.compiler.nodes.DeoptimizeNode;
import org.graalvm.compiler.nodes.FrameState;
import org.graalvm.compiler.nodes.InvokeNode;
import org.graalvm.compiler.nodes.NamedLocationIdentity;
import org.graalvm.compiler.nodes.ReturnNode;
import org.graalvm.compiler.nodes.StructuredGraph;
import org.graalvm.compiler.nodes.ValueNode;
import org.graalvm.compiler.nodes.debug.DynamicCounterNode;
import org.graalvm.compiler.nodes.extended.ForeignCallNode;
import org.graalvm.compiler.nodes.extended.MembarNode;
import org.graalvm.compiler.nodes.java.MethodCallTargetNode;
import org.graalvm.compiler.nodes.java.MonitorExitNode;
import org.graalvm.compiler.nodes.java.RawMonitorEnterNode;
import org.graalvm.compiler.nodes.spi.LoweringTool;
import org.graalvm.compiler.nodes.type.StampTool;
import org.graalvm.compiler.options.OptionValues;
import org.graalvm.compiler.phases.common.inlining.InliningUtil;
import org.graalvm.compiler.replacements.Log;
import org.graalvm.compiler.replacements.SnippetCounter;
import org.graalvm.compiler.replacements.SnippetTemplate.AbstractTemplates;
import org.graalvm.compiler.replacements.SnippetTemplate.Arguments;
import org.graalvm.compiler.replacements.SnippetTemplate.SnippetInfo;
import org.graalvm.compiler.replacements.Snippets;
import org.graalvm.compiler.word.Word;
import org.graalvm.word.LocationIdentity;
import org.graalvm.word.Pointer;
import org.graalvm.word.WordBase;
import org.graalvm.word.WordFactory;

import jdk.vm.ci.code.BytecodeFrame;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.TargetDescription;
import jdk.vm.ci.meta.DeoptimizationAction;
import jdk.vm.ci.meta.DeoptimizationReason;
import jdk.vm.ci.meta.JavaType;
import jdk.vm.ci.meta.ResolvedJavaType;

/**
 * Snippets used for implementing the monitorenter and monitorexit instructions.
 *
 * The locking algorithm used is described in the paper
 * <a href="http://dl.acm.org/citation.cfm?id=1167515.1167496"> Eliminating synchronization-related
 * atomic operations with biased locking and bulk rebiasing</a> by Kenneth Russell and David
 * Detlefs.
 *
 * Comment below is reproduced from {@code markOop.hpp} for convenience:
 *
 * <pre>
 *  Bit-format of an object header (most significant first, big endian layout below):
 *  32 bits:
 *  --------
 *             hash:25 ------------>| age:4    biased_lock:1 lock:2 (normal object)
 *             JavaThread*:23 epoch:2 age:4    biased_lock:1 lock:2 (biased object)
 *             size:32 ------------------------------------------>| (CMS free block)
 *             PromotedObject*:29 ---------->| promo_bits:3 ----->| (CMS promoted object)
 *
 *  64 bits:
 *  --------
 *  unused:25 hash:31 -->| unused:1   age:4    biased_lock:1 lock:2 (normal object)
 *  JavaThread*:54 epoch:2 unused:1   age:4    biased_lock:1 lock:2 (biased object)
 *  PromotedObject*:61 --------------------->| promo_bits:3 ----->| (CMS promoted object)
 *  size:64 ----------------------------------------------------->| (CMS free block)
 *
 *  unused:25 hash:31 -->| cms_free:1 age:4    biased_lock:1 lock:2 (COOPs && normal object)
 *  JavaThread*:54 epoch:2 cms_free:1 age:4    biased_lock:1 lock:2 (COOPs && biased object)
 *  narrowOop:32 unused:24 cms_free:1 unused:4 promo_bits:3 ----->| (COOPs && CMS promoted object)
 *  unused:21 size:35 -->| cms_free:1 unused:7 ------------------>| (COOPs && CMS free block)
 *
 *  - hash contains the identity hash value: largest value is
 *    31 bits, see os::random().  Also, 64-bit vm's require
 *    a hash value no bigger than 32 bits because they will not
 *    properly generate a mask larger than that: see library_call.cpp
 *    and c1_CodePatterns_sparc.cpp.
 *
 *  - the biased lock pattern is used to bias a lock toward a given
 *    thread. When this pattern is set in the low three bits, the lock
 *    is either biased toward a given thread or "anonymously" biased,
 *    indicating that it is possible for it to be biased. When the
 *    lock is biased toward a given thread, locking and unlocking can
 *    be performed by that thread without using atomic operations.
 *    When a lock's bias is revoked, it reverts back to the normal
 *    locking scheme described below.
 *
 *    Note that we are overloading the meaning of the "unlocked" state
 *    of the header. Because we steal a bit from the age we can
 *    guarantee that the bias pattern will never be seen for a truly
 *    unlocked object.
 *
 *    Note also that the biased state contains the age bits normally
 *    contained in the object header. Large increases in scavenge
 *    times were seen when these bits were absent and an arbitrary age
 *    assigned to all biased objects, because they tended to consume a
 *    significant fraction of the eden semispaces and were not
 *    promoted promptly, causing an increase in the amount of copying
 *    performed. The runtime system aligns all JavaThread* pointers to
 *    a very large value (currently 128 bytes (32bVM) or 256 bytes (64bVM))
 *    to make room for the age bits & the epoch bits (used in support of
 *    biased locking), and for the CMS "freeness" bit in the 64bVM (+COOPs).
 *
 *    [JavaThread* | epoch | age | 1 | 01]       lock is biased toward given thread
 *    [0           | epoch | age | 1 | 01]       lock is anonymously biased
 *
 *  - the two lock bits are used to describe three states: locked/unlocked and monitor.
 *
 *    [ptr             | 00]  locked             ptr points to real header on stack
 *    [header      | 0 | 01]  unlocked           regular object header
 *    [ptr             | 10]  monitor            inflated lock (header is swapped out)
 *    [ptr             | 11]  marked             used by markSweep to mark an object
 *                                               not valid at any other time
 *
 *    We assume that stack/thread pointers have the lowest two bits cleared.
 * </pre>
 *
 * Note that {@code Thread::allocate} ensures that {@code JavaThread} objects are aligned
 * appropriately to comply with the layouts above.
 */
public class MonitorSnippets implements Snippets {

    private static final boolean PROFILE_CONTEXT = false;

    @Fold
    static boolean doProfile(OptionValues options) {
        return ProfileMonitors.getValue(options);
    }

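    /**
     * Fast-path monitor acquisition. Depending on the VM configuration this snippet first tries
     * biased locking, then either the inflated-monitor fast path (see {@link #tryEnterInflated})
     * or stack locking via a CAS of the displaced mark word, and only calls the
     * {@code monitorenter} stub when all inlined fast paths fail.
     */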
    @Snippet
    public static void monitorenter(Object object, KlassPointer hub, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter Register stackPointerRegister,
                    @ConstantParameter boolean trace, @ConstantParameter OptionValues options, @ConstantParameter Counters counters) {
        verifyOop(object);

        // Load the mark word - this includes a null-check on object
        final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));

        final Word lock = beginLockScope(lockDepth);

        Pointer objectPointer = Word.objectToTrackedPointer(object);
        trace(trace, "           object: 0x%016lx\n", objectPointer);
        trace(trace, "             lock: 0x%016lx\n", lock);
        trace(trace, "             mark: 0x%016lx\n", mark);

        incCounter(options);

        if (useBiasedLocking(INJECTED_VMCONFIG)) {
            if (tryEnterBiased(object, hub, lock, mark, threadRegister, trace, options, counters)) {
                return;
            }
            // not biased, fall-through
        }
        if (inlineFastLockSupported(options) && probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) {
            // Inflated case
            if (tryEnterInflated(object, lock, mark, threadRegister, trace, options, counters)) {
                return;
            }
        } else {
            // Create the unlocked mark word pattern
            Word unlockedMark = mark.or(unlockedMask(INJECTED_VMCONFIG));
            trace(trace, "     unlockedMark: 0x%016lx\n", unlockedMark);

            // Copy this unlocked mark word into the lock slot on the stack
            lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), unlockedMark, DISPLACED_MARK_WORD_LOCATION);

            // make sure previous store does not float below compareAndSwap
            MembarNode.memoryBarrier(STORE_STORE);

            // Test if the object's mark word is unlocked, and if so, store the
            // address of the lock slot into the object's mark word.
            Word currentMark = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), unlockedMark, lock, MARK_WORD_LOCATION);
            if (probability(FAST_PATH_PROBABILITY, currentMark.equal(unlockedMark))) {
                traceObject(trace, "+lock{cas}", object, true, options);
                counters.lockCas.inc();
                AcquiredCASLockNode.mark(object);
                return;
            } else {
                trace(trace, "      currentMark: 0x%016lx\n", currentMark);
                // The mark word in the object header was not the same.
                // Either the object is locked by another thread or is already locked
                // by the current thread. The latter is true if the mark word
                // is a stack pointer into the current thread's stack, i.e.:
                //
                // 1) (currentMark & aligned_mask) == 0
                // 2) rsp <= currentMark
                // 3) currentMark <= rsp + page_size
                //
                // These 3 tests can be done by evaluating the following expression:
                //
                // (currentMark - rsp) & (aligned_mask - page_size)
                //
                // assuming both the stack pointer and page_size have their least
                // significant 2 bits cleared and page_size is a power of 2
                final Word alignedMask = WordFactory.unsigned(wordSize() - 1);
                final Word stackPointer = registerAsWord(stackPointerRegister).add(config(INJECTED_VMCONFIG).stackBias);
                if (probability(FAST_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize())).equal(0))) {
                    // Recursively locked => write 0 to the lock slot
                    lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), WordFactory.zero(), DISPLACED_MARK_WORD_LOCATION);
                    traceObject(trace, "+lock{cas:recursive}", object, true, options);
                    counters.lockCasRecursive.inc();
                    return;
                }
                traceObject(trace, "+lock{stub:failed-cas/stack}", object, true, options);
                counters.lockStubFailedCas.inc();
            }
        }
        // slow-path runtime-call
        monitorenterStubC(MONITORENTER, object, lock);
    }

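    /**
     * Tries to acquire the lock via biased locking. Returns {@code true} if the monitorenter is
     * complete (the bias was held, acquired, transferred, or the slow-path stub has already been
     * called) and {@code false} if the caller should fall through to stack locking.
     */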
    private static boolean tryEnterBiased(Object object, KlassPointer hub, Word lock, Word mark, Register threadRegister, boolean trace, OptionValues options, Counters counters) {
        // See whether the lock is currently biased toward our thread and
        // whether the epoch is still valid.
        // Note that the runtime guarantees sufficient alignment of JavaThread
        // pointers to allow age to be placed into low bits.
        final Word biasableLockBits = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG));

        // Check whether the bias pattern is present in the object's mark word
        // and the bias owner and the epoch are both still current.
        final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(INJECTED_VMCONFIG), PROTOTYPE_MARK_WORD_LOCATION);
        final Word thread = registerAsWord(threadRegister);
        final Word tmp = prototypeMarkWord.or(thread).xor(mark).and(~ageMaskInPlace(INJECTED_VMCONFIG));
        trace(trace, "prototypeMarkWord: 0x%016lx\n", prototypeMarkWord);
        trace(trace, "           thread: 0x%016lx\n", thread);
        trace(trace, "              tmp: 0x%016lx\n", tmp);
        if (probability(FAST_PATH_PROBABILITY, tmp.equal(0))) {
            // Object is already biased to current thread -> done
            traceObject(trace, "+lock{bias:existing}", object, true, options);
            counters.lockBiasExisting.inc();
            FastAcquireBiasedLockNode.mark(object);
            return true;
        }

        // Now check to see whether biasing is enabled for this object
        if (probability(NOT_FREQUENT_PROBABILITY, biasableLockBits.equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
            Pointer objectPointer = Word.objectToTrackedPointer(object);
            // At this point we know that the mark word has the bias pattern and
            // that we are not the bias owner in the current epoch. We need to
            // figure out more details about the state of the mark word in order to
            // know what operations can be legally performed on the object's
            // mark word.

            // If the low three bits in the xor result aren't clear, that means
            // the prototype header is no longer biasable and we have to revoke
            // the bias on this object.
            if (probability(FREQUENT_PROBABILITY, tmp.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
                // Biasing is still enabled for object's type. See whether the
                // epoch of the current bias is still valid, meaning that the epoch
                // bits of the mark word are equal to the epoch bits of the
                // prototype mark word. (Note that the prototype mark word's epoch bits
                // only change at a safepoint.) If not, attempt to rebias the object
                // toward the current thread. Note that we must be absolutely sure
                // that the current epoch is invalid in order to do this because
                // otherwise the manipulations it performs on the mark word are
                // illegal.
                if (probability(FREQUENT_PROBABILITY, tmp.and(epochMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
                    // The epoch of the current bias is still valid but we know nothing
                    // about the owner; it might be set or it might be clear. Try to
                    // acquire the bias of the object using an atomic operation. If this
                    // fails we will go into the runtime to revoke the object's bias.
                    // Note that we first construct the presumed unbiased header so we
                    // don't accidentally blow away another thread's valid bias.
                    Word unbiasedMark = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG) | ageMaskInPlace(INJECTED_VMCONFIG) | epochMaskInPlace(INJECTED_VMCONFIG));
                    Word biasedMark = unbiasedMark.or(thread);
                    trace(trace, "     unbiasedMark: 0x%016lx\n", unbiasedMark);
                    trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
                    if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), unbiasedMark, biasedMark, MARK_WORD_LOCATION))) {
                        // Object is now biased to current thread -> done
                        traceObject(trace, "+lock{bias:acquired}", object, true, options);
                        counters.lockBiasAcquired.inc();
                        return true;
                    }
                    // If the biasing toward our thread failed, this means that another thread
                    // owns the bias and we need to revoke that bias. The revocation will occur
                    // in the interpreter runtime.
                    traceObject(trace, "+lock{stub:revoke}", object, true, options);
                    counters.lockStubRevoke.inc();
                } else {
                    // At this point we know the epoch has expired, meaning that the
                    // current bias owner, if any, is actually invalid. Under these
                    // circumstances _only_, are we allowed to use the current mark word
                    // value as the comparison value when doing the CAS to acquire the
                    // bias in the current epoch. In other words, we allow transfer of
                    // the bias from one thread to another directly in this situation.
                    Word biasedMark = prototypeMarkWord.or(thread);
                    trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
                    if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, biasedMark, MARK_WORD_LOCATION))) {
                        // Object is now biased to current thread -> done
                        traceObject(trace, "+lock{bias:transfer}", object, true, options);
                        counters.lockBiasTransfer.inc();
                        return true;
                    }
                    // If the biasing toward our thread failed, then another thread
                    // succeeded in biasing it toward itself and we need to revoke that
                    // bias. The revocation will occur in the runtime in the slow case.
                    traceObject(trace, "+lock{stub:epoch-expired}", object, true, options);
                    counters.lockStubEpochExpired.inc();
                }
                // slow-path runtime-call
                monitorenterStubC(MONITORENTER, object, lock);
                return true;
            } else {
                // The prototype mark word doesn't have the bias bit set any
                // more, indicating that objects of this data type are not supposed
                // to be biased any more. We are going to try to reset the mark of
                // this object to the prototype value and fall through to the
                // CAS-based locking scheme. Note that if our CAS fails, it means
                // that another thread raced us for the privilege of revoking the
                // bias of this particular object, so it's okay to continue in the
                // normal locking code.
                Word result = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, prototypeMarkWord, MARK_WORD_LOCATION);

                // Fall through to the normal CAS-based lock, because no matter what
                // the result of the above CAS, some thread must have succeeded in
                // removing the bias bit from the object's header.

                if (ENABLE_BREAKPOINT) {
                    bkpt(object, mark, tmp, result);
                }
                counters.revokeBias.inc();
                return false;
            }
        } else {
            // Biasing not enabled -> fall through to lightweight locking
            counters.unbiasable.inc();
            return false;
        }
    }

    @Fold
    public static boolean useFastInflatedLocking(OptionValues options) {
        return SimpleFastInflatedLocking.getValue(options);
    }

    private static boolean inlineFastLockSupported(OptionValues options) {
        return inlineFastLockSupported(INJECTED_VMCONFIG, options);
    }

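    /**
     * The inflated-lock fast path is only inlined when {@code SimpleFastInflatedLocking} is
     * enabled and the VM config provides the monitor mask and the {@code ObjectMonitor} owner
     * offset (the {@code >= 0} checks guard against configurations where these values are
     * unavailable).
     */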
    private static boolean inlineFastLockSupported(GraalHotSpotVMConfig config, OptionValues options) {
        return useFastInflatedLocking(options) && monitorMask(config) >= 0 && objectMonitorOwnerOffset(config) >= 0;
    }

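    /**
     * Fast path for a monitor that is already inflated: if the {@code ObjectMonitor} appears to
     * have no owner, try to CAS the current thread into its owner field. Returns {@code true} on
     * success and {@code false} if the caller must fall back to the monitorenter stub.
     */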
    private static boolean tryEnterInflated(Object object, Word lock, Word mark, Register threadRegister, boolean trace, OptionValues options, Counters counters) {
        // write non-zero value to lock slot
        lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), lock, DISPLACED_MARK_WORD_LOCATION);
        // mark is a pointer to the ObjectMonitor + monitorMask
        Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG));
        int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG);
        Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION);
        if (probability(FREQUENT_PROBABILITY, owner.equal(0))) {
            // it appears unlocked (owner == 0)
            if (probability(FREQUENT_PROBABILITY, monitor.logicCompareAndSwapWord(ownerOffset, owner, registerAsWord(threadRegister), OBJECT_MONITOR_OWNER_LOCATION))) {
                // success
                traceObject(trace, "+lock{inflated:cas}", object, true, options);
                counters.inflatedCas.inc();
                return true;
            } else {
                traceObject(trace, "+lock{stub:inflated:failed-cas}", object, true, options);
                counters.inflatedFailedCas.inc();
            }
        } else {
            traceObject(trace, "+lock{stub:inflated:owned}", object, true, options);
            counters.inflatedOwned.inc();
        }
        return false;
    }

    /**
     * Calls straight out to the monitorenter stub.
     */
    @Snippet
    public static void monitorenterStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace, @ConstantParameter OptionValues options) {
        verifyOop(object);
        incCounter(options);
        if (object == null) {
            DeoptimizeNode.deopt(DeoptimizationAction.InvalidateReprofile, DeoptimizationReason.NullCheckException);
        }
        // BeginLockScope nodes do not read from object, so a use of object
        // cannot float above the null check
        final Word lock = beginLockScope(lockDepth);
        traceObject(trace, "+lock{stub}", object, true, options);
        monitorenterStubC(MONITORENTER, object, lock);
    }

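    /**
     * Fast-path monitor release: a biased lock needs no work, a recursive stack lock (zero
     * displaced mark word) needs no CAS, an inflated monitor is handled by
     * {@link #tryExitInflated}, and otherwise the displaced mark word is CAS-ed back into the
     * object's mark word. If the CAS fails, the {@code monitorexit} stub is called.
     */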
    @Snippet
    public static void monitorexit(Object object, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter boolean trace,
                    @ConstantParameter OptionValues options, @ConstantParameter Counters counters) {
        trace(trace, "           object: 0x%016lx\n", Word.objectToTrackedPointer(object));
        final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));
        if (useBiasedLocking(INJECTED_VMCONFIG)) {
            // Check for biased locking unlock case, which is a no-op
            // Note: we do not have to check the thread ID for two reasons.
            // First, the interpreter checks for IllegalMonitorStateException at
            // a higher level. Second, if the bias was revoked while we held the
            // lock, the object could not be rebiased toward another thread, so
            // the bias bit would be clear.
            trace(trace, "             mark: 0x%016lx\n", mark);
            if (probability(FREQUENT_PROBABILITY, mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
                endLockScope();
                decCounter(options);
                traceObject(trace, "-lock{bias}", object, false, options);
                counters.unlockBias.inc();
                return;
            }
        }

        final Word lock = CurrentLockNode.currentLock(lockDepth);

        // Load displaced mark
        final Word displacedMark = lock.readWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), DISPLACED_MARK_WORD_LOCATION);
        trace(trace, "    displacedMark: 0x%016lx\n", displacedMark);

        if (probability(NOT_LIKELY_PROBABILITY, displacedMark.equal(0))) {
            // Recursive locking => done
            traceObject(trace, "-lock{recursive}", object, false, options);
            counters.unlockCasRecursive.inc();
        } else {
            if (!tryExitInflated(object, mark, lock, threadRegister, trace, options, counters)) {
                verifyOop(object);
                // Test if object's mark word is pointing to the displaced mark word, and if so,
                // restore the displaced mark in the object - if the object's mark word is not
                // pointing to the displaced mark word, do unlocking via runtime call.
                Pointer objectPointer = Word.objectToTrackedPointer(object);
                if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), lock, displacedMark, MARK_WORD_LOCATION))) {
                    traceObject(trace, "-lock{cas}", object, false, options);
                    counters.unlockCas.inc();
                } else {
                    // The object's mark word was not pointing to the displaced header
                    traceObject(trace, "-lock{stub}", object, false, options);
                    counters.unlockStub.inc();
                    monitorexitStubC(MONITOREXIT, object, lock);
                }
            }
        }
        endLockScope();
        decCounter(options);
    }

    private static boolean inlineFastUnlockSupported(OptionValues options) {
        return inlineFastUnlockSupported(INJECTED_VMCONFIG, options);
    }

    private static boolean inlineFastUnlockSupported(GraalHotSpotVMConfig config, OptionValues options) {
        return useFastInflatedLocking(options) && objectMonitorEntryListOffset(config) >= 0 && objectMonitorCxqOffset(config) >= 0 && monitorMask(config) >= 0 &&
                        objectMonitorOwnerOffset(config) >= 0 && objectMonitorRecursionsOffset(config) >= 0;
    }

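    /**
     * Fast path for unlocking an inflated monitor: if the current thread owns the monitor, there
     * are no recursions, and neither the cxq nor the entry list has waiters, the owner field is
     * simply cleared; otherwise the {@code monitorexit} stub is called. Returns {@code false}
     * only if the lock is not inflated (or the fast unlock path cannot be inlined), in which case
     * the caller handles the exit.
     */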
    private static boolean tryExitInflated(Object object, Word mark, Word lock, Register threadRegister, boolean trace, OptionValues options, Counters counters) {
        if (!inlineFastUnlockSupported(options)) {
            return false;
        }
        if (probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) {
            // Inflated case
            // mark is a pointer to the ObjectMonitor + monitorMask
            Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG));
            int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG);
            Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION);
            int recursionsOffset = objectMonitorRecursionsOffset(INJECTED_VMCONFIG);
            Word recursions = monitor.readWord(recursionsOffset, OBJECT_MONITOR_RECURSION_LOCATION);
            Word thread = registerAsWord(threadRegister);
            if (probability(FAST_PATH_PROBABILITY, owner.xor(thread).or(recursions).equal(0))) {
                // owner == thread && recursions == 0
                int cxqOffset = objectMonitorCxqOffset(INJECTED_VMCONFIG);
                Word cxq = monitor.readWord(cxqOffset, OBJECT_MONITOR_CXQ_LOCATION);
                int entryListOffset = objectMonitorEntryListOffset(INJECTED_VMCONFIG);
                Word entryList = monitor.readWord(entryListOffset, OBJECT_MONITOR_ENTRY_LIST_LOCATION);
                if (probability(FREQUENT_PROBABILITY, cxq.or(entryList).equal(0))) {
                    // cxq == 0 && entryList == 0
                    // Nobody is waiting, success
                    // release_store
                    MembarNode.memoryBarrier(LOAD_STORE | STORE_STORE);
                    monitor.writeWord(ownerOffset, WordFactory.zero());
                    traceObject(trace, "-lock{inflated:simple}", object, false, options);
                    counters.unlockInflatedSimple.inc();
                    return true;
                }
            }
            counters.unlockStubInflated.inc();
            traceObject(trace, "-lock{stub:inflated}", object, false, options);
            monitorexitStubC(MONITOREXIT, object, lock);
            return true;
        }
        return false;
    }

    /**
     * Calls straight out to the monitorexit stub.
     */
    @Snippet
    public static void monitorexitStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace, @ConstantParameter OptionValues options) {
        verifyOop(object);
        traceObject(trace, "-lock{stub}", object, false, options);
        final Word lock = CurrentLockNode.currentLock(lockDepth);
        monitorexitStubC(MONITOREXIT, object, lock);
        endLockScope();
        decCounter(options);
    }

    public static void traceObject(boolean enabled, String action, Object object, boolean enter, OptionValues options) {
        if (doProfile(options)) {
            DynamicCounterNode.counter(action, enter ? "number of monitor enters" : "number of monitor exits", 1, PROFILE_CONTEXT);
        }
        if (enabled) {
            Log.print(action);
            Log.print(' ');
            Log.printlnObject(object);
        }
    }

    public static void trace(boolean enabled, String format, WordBase value) {
        if (enabled) {
            Log.printf(format, value.rawValue());
        }
    }

    /**
     * Leaving the breakpoint code in to provide an example of how to use the {@link BreakpointNode}
     * intrinsic.
     */
    private static final boolean ENABLE_BREAKPOINT = false;

    private static final LocationIdentity MONITOR_COUNTER_LOCATION = NamedLocationIdentity.mutable("MonitorCounter");

    @NodeIntrinsic(BreakpointNode.class)
    static native void bkpt(Object object, Word mark, Word tmp, Word value);

    @Fold
    static boolean verifyBalancedMonitors(OptionValues options) {
        return VerifyBalancedMonitors.getValue(options);
    }

    public static void incCounter(OptionValues options) {
        if (verifyBalancedMonitors(options)) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count + 1, MONITOR_COUNTER_LOCATION);
        }
    }

    public static void decCounter(OptionValues options) {
        if (verifyBalancedMonitors(options)) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count - 1, MONITOR_COUNTER_LOCATION);
        }
    }

    @Snippet
    private static void initCounter() {
        final Word counter = MonitorCounterNode.counter();
        counter.writeInt(0, 0, MONITOR_COUNTER_LOCATION);
    }

    @Snippet
    private static void checkCounter(@ConstantParameter String errMsg) {
        final Word counter = MonitorCounterNode.counter();
        final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
        if (count != 0) {
            vmError(errMsg, count);
        }
    }

    public static class Counters {
        /**
         * Counters for the various paths for acquiring a lock. The counters whose names start with
         * {@code "lock"} are mutually exclusive. The other counters are for paths that may be
         * shared.
         */
        public final SnippetCounter lockBiasExisting;
        public final SnippetCounter lockBiasAcquired;
        public final SnippetCounter lockBiasTransfer;
        public final SnippetCounter lockCas;
        public final SnippetCounter lockCasRecursive;
        public final SnippetCounter lockStubEpochExpired;
        public final SnippetCounter lockStubRevoke;
        public final SnippetCounter lockStubFailedCas;
        public final SnippetCounter inflatedCas;
        public final SnippetCounter inflatedFailedCas;
        public final SnippetCounter inflatedOwned;
        public final SnippetCounter unbiasable;
        public final SnippetCounter revokeBias;

        /**
         * Counters for the various paths for releasing a lock. The counters whose names start with
         * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be
         * shared.
         */
        public final SnippetCounter unlockBias;
        public final SnippetCounter unlockCas;
        public final SnippetCounter unlockCasRecursive;
        public final SnippetCounter unlockStub;
        public final SnippetCounter unlockStubInflated;
        public final SnippetCounter unlockInflatedSimple;

        public Counters(SnippetCounter.Group.Factory factory) {
            SnippetCounter.Group enter = factory.createSnippetCounterGroup("MonitorEnters");
            SnippetCounter.Group exit = factory.createSnippetCounterGroup("MonitorExits");
            lockBiasExisting = new SnippetCounter(enter, "lock{bias:existing}", "bias-locked previously biased object");
            lockBiasAcquired = new SnippetCounter(enter, "lock{bias:acquired}", "bias-locked newly biased object");
            lockBiasTransfer = new SnippetCounter(enter, "lock{bias:transfer}", "bias-locked, biased transferred");
            lockCas = new SnippetCounter(enter, "lock{cas}", "cas-locked an object");
            lockCasRecursive = new SnippetCounter(enter, "lock{cas:recursive}", "cas-locked, recursive");
            lockStubEpochExpired = new SnippetCounter(enter, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
            lockStubRevoke = new SnippetCounter(enter, "lock{stub:revoke}", "stub-locked, biased revoked");
            lockStubFailedCas = new SnippetCounter(enter, "lock{stub:failed-cas/stack}", "stub-locked, failed cas and stack locking");
            inflatedCas = new SnippetCounter(enter, "lock{inflated:cas}", "heavyweight-locked, cas-locked");
            inflatedFailedCas = new SnippetCounter(enter, "lock{inflated:failed-cas}", "heavyweight-locked, failed cas");
            inflatedOwned = new SnippetCounter(enter, "lock{inflated:owned}", "heavyweight-locked, already owned");
            unbiasable = new SnippetCounter(enter, "unbiasable", "object with unbiasable type");
            revokeBias = new SnippetCounter(enter, "revokeBias", "object had bias revoked");

            unlockBias = new SnippetCounter(exit, "unlock{bias}", "bias-unlocked an object");
            unlockCas = new SnippetCounter(exit, "unlock{cas}", "cas-unlocked an object");
            unlockCasRecursive = new SnippetCounter(exit, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
            unlockStub = new SnippetCounter(exit, "unlock{stub}", "stub-unlocked an object");
            unlockStubInflated = new SnippetCounter(exit, "unlock{stub:inflated}", "stub-unlocked an object with inflated monitor");
            unlockInflatedSimple = new SnippetCounter(exit, "unlock{inflated}", "unlocked an object monitor");
        }
    }

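    /**
     * Lowers {@link RawMonitorEnterNode} and {@link MonitorExitNode} either to the fast-locking
     * snippets above or to the stub-only snippets, depending on whether fast locking is enabled.
     */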
    public static class Templates extends AbstractTemplates {

        private final SnippetInfo monitorenter = snippet(MonitorSnippets.class, "monitorenter");
        private final SnippetInfo monitorexit = snippet(MonitorSnippets.class, "monitorexit");
        private final SnippetInfo monitorenterStub = snippet(MonitorSnippets.class, "monitorenterStub");
        private final SnippetInfo monitorexitStub = snippet(MonitorSnippets.class, "monitorexitStub");
        private final SnippetInfo initCounter = snippet(MonitorSnippets.class, "initCounter");
        private final SnippetInfo checkCounter = snippet(MonitorSnippets.class, "checkCounter");

        private final boolean useFastLocking;
        public final Counters counters;

        public Templates(OptionValues options, Iterable<DebugHandlersFactory> factories, SnippetCounter.Group.Factory factory, HotSpotProviders providers, TargetDescription target,
                        boolean useFastLocking) {
            super(options, factories, providers, providers.getSnippetReflection(), target);
            this.useFastLocking = useFastLocking;

            this.counters = new Counters(factory);
        }

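        /**
         * Lowers a {@link RawMonitorEnterNode} to either the full {@code monitorenter} snippet or
         * the stub-only {@code monitorenterStub} snippet, depending on {@code useFastLocking}.
         * The node's object is expected to have a non-null stamp, which is asserted below.
         */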
        public void lower(RawMonitorEnterNode monitorenterNode, HotSpotRegistersProvider registers, LoweringTool tool) {
            StructuredGraph graph = monitorenterNode.graph();
            checkBalancedMonitors(graph, tool);

            assert ((ObjectStamp) monitorenterNode.object().stamp()).nonNull();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorenter, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.add("hub", monitorenterNode.getHub());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("threadRegister", registers.getThreadRegister());
                args.addConst("stackPointerRegister", registers.getStackPointerRegister());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
                args.addConst("options", graph.getOptions());
                args.addConst("counters", counters);
            } else {
                args = new Arguments(monitorenterStub, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
                args.addConst("options", graph.getOptions());
                args.addConst("counters", counters);
            }

            template(graph.getDebug(), args).instantiate(providers.getMetaAccess(), monitorenterNode, DEFAULT_REPLACER, args);
        }

        public void lower(MonitorExitNode monitorexitNode, HotSpotRegistersProvider registers, LoweringTool tool) {
            StructuredGraph graph = monitorexitNode.graph();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorexit, graph.getGuardsStage(), tool.getLoweringStage());
            } else {
                args = new Arguments(monitorexitStub, graph.getGuardsStage(), tool.getLoweringStage());
            }
            args.add("object", monitorexitNode.object());
            args.addConst("lockDepth", monitorexitNode.getMonitorId().getLockDepth());
            args.addConst("threadRegister", registers.getThreadRegister());
            args.addConst("trace", isTracingEnabledForType(monitorexitNode.object()) || isTracingEnabledForMethod(graph));
            args.addConst("options", graph.getOptions());
            args.addConst("counters", counters);

            template(graph.getDebug(), args).instantiate(providers.getMetaAccess(), monitorexitNode, DEFAULT_REPLACER, args);
        }

        public static boolean isTracingEnabledForType(ValueNode object) {
            ResolvedJavaType type = StampTool.typeOrNull(object.stamp());
            String filter = TraceMonitorsTypeFilter.getValue(object.getOptions());
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (type == null) {
                    return false;
                }
                return (type.getName().contains(filter));
            }
        }

        public static boolean isTracingEnabledForMethod(StructuredGraph graph) {
            String filter = TraceMonitorsMethodFilter.getValue(graph.getOptions());
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (graph.method() == null) {
                    return false;
                }
                return (graph.method().format("%H.%n").contains(filter));
            }
        }

        /**
         * If balanced monitor checking is enabled then nodes are inserted at the start and all
         * return points of the graph to initialize and check the monitor counter respectively.
         */
        private void checkBalancedMonitors(StructuredGraph graph, LoweringTool tool) {
            if (VerifyBalancedMonitors.getValue(options)) {
                NodeIterable<MonitorCounterNode> nodes = graph.getNodes().filter(MonitorCounterNode.class);
                if (nodes.isEmpty()) {
                    // Only insert the nodes if this is the first monitorenter being lowered.
                    JavaType returnType = initCounter.getMethod().getSignature().getReturnType(initCounter.getMethod().getDeclaringClass());
                    StampPair returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                    MethodCallTargetNode callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, initCounter.getMethod(), new ValueNode[0], returnStamp, null));
                    InvokeNode invoke = graph.add(new InvokeNode(callTarget, 0));
                    invoke.setStateAfter(graph.start().stateAfter());
                    graph.addAfterFixed(graph.start(), invoke);

                    StructuredGraph inlineeGraph = providers.getReplacements().getSnippet(initCounter.getMethod(), null);
                    InliningUtil.inline(invoke, inlineeGraph, false, null);

                    List<ReturnNode> rets = graph.getNodes(ReturnNode.TYPE).snapshot();
                    for (ReturnNode ret : rets) {
                        returnType = checkCounter.getMethod().getSignature().getReturnType(checkCounter.getMethod().getDeclaringClass());
                        String msg = "unbalanced monitors in " + graph.method().format("%H.%n(%p)") + ", count = %d";
                        ConstantNode errMsg = ConstantNode.forConstant(tool.getConstantReflection().forString(msg), providers.getMetaAccess(), graph);
                        returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                        callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, checkCounter.getMethod(), new ValueNode[]{errMsg}, returnStamp, null));
                        invoke = graph.add(new InvokeNode(callTarget, 0));
                        Bytecode code = new ResolvedJavaMethodBytecode(graph.method());
                        FrameState stateAfter = new FrameState(null, code, BytecodeFrame.AFTER_BCI, new ValueNode[0], new ValueNode[0], 0, new ValueNode[0], null, false, false);
                        invoke.setStateAfter(graph.add(stateAfter));
                        graph.addBeforeFixed(ret, invoke);

                        Arguments args = new Arguments(checkCounter, graph.getGuardsStage(), tool.getLoweringStage());
                        args.addConst("errMsg", msg);
                        inlineeGraph = template(graph.getDebug(), args).copySpecializedGraph(graph.getDebug());
                        InliningUtil.inline(invoke, inlineeGraph, false, null);
                    }
                }
            }
        }
    }

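    // Foreign call descriptors and node intrinsics for the monitorenter/monitorexit runtime
    // stubs invoked on the slow paths above.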
    public static final ForeignCallDescriptor MONITORENTER = new ForeignCallDescriptor("monitorenter", void.class, Object.class, Word.class);
    public static final ForeignCallDescriptor MONITOREXIT = new ForeignCallDescriptor("monitorexit", void.class, Object.class, Word.class);

    @NodeIntrinsic(ForeignCallNode.class)
    private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);

    @NodeIntrinsic(ForeignCallNode.class)
    public static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
}