// instanceRefKlass.cpp — HotSpot revision 3602:da91efe96a93
1/*
2 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "classfile/javaClasses.hpp"
27#include "classfile/systemDictionary.hpp"
28#include "gc_implementation/shared/markSweep.inline.hpp"
29#include "gc_interface/collectedHeap.hpp"
30#include "gc_interface/collectedHeap.inline.hpp"
31#include "memory/genCollectedHeap.hpp"
32#include "memory/genOopClosures.inline.hpp"
33#include "oops/instanceRefKlass.hpp"
34#include "oops/oop.inline.hpp"
35#include "utilities/preserveException.hpp"
36#ifndef SERIALGC
37#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
38#include "gc_implementation/g1/g1OopClosures.inline.hpp"
39#include "gc_implementation/g1/g1RemSet.inline.hpp"
40#include "gc_implementation/g1/heapRegionSeq.inline.hpp"
41#include "gc_implementation/parNew/parOopClosures.inline.hpp"
42#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
43#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
44#include "oops/oop.pcgc.inline.hpp"
45#endif
46
// Serial mark-sweep marking of a java.lang.ref.Reference instance.
// First offers the (unmarked) referent to the MarkSweep ReferenceProcessor;
// if discovery succeeds the referent is deliberately NOT marked here — it
// will be handled during reference processing.  Otherwise the referent,
// and then the next/discovered fields, are marked like ordinary oops.
// T is oop or narrowOop depending on UseCompressedOops.
template <class T>
void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) {
      // reference was discovered, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      // Skip marking of referent/next/discovered below: reference processing
      // now owns this Reference.
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      MarkSweep::mark_and_push(discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }
  // treat next as normal oop.  next is a link in the reference queue.
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  // Finally follow the ordinary (non-special) instance fields.
  ref->InstanceKlass::oop_follow_contents(obj);
}
114
115void instanceRefKlass::oop_follow_contents(oop obj) {
116  if (UseCompressedOops) {
117    specialized_oop_follow_contents<narrowOop>(this, obj);
118  } else {
119    specialized_oop_follow_contents<oop>(this, obj);
120  }
121}
122
123#ifndef SERIALGC
// Parallel-compact (PSParallelCompact) marking of a java.lang.ref.Reference
// instance.  Mirrors the serial version above, but tests markedness via the
// compact mark bitmap and pushes through the per-thread ParCompactionManager.
template <class T>
void specialized_oop_follow_contents(instanceRefKlass* ref,
                                     ParCompactionManager* cm,
                                     oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      PSParallelCompact::mark_and_push(cm, discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    T next = oopDesc::load_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }
  // Treat next as a normal oop, then follow the remaining instance fields.
  PSParallelCompact::mark_and_push(cm, next_addr);
  ref->InstanceKlass::oop_follow_contents(cm, obj);
}
188
189void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
190                                           oop obj) {
191  if (UseCompressedOops) {
192    specialized_oop_follow_contents<narrowOop>(this, cm, obj);
193  } else {
194    specialized_oop_follow_contents<oop>(this, cm, obj);
195  }
196}
197#endif // SERIALGC
198
199#ifdef ASSERT
// Debug-only helper: when both TraceReferenceGC and PrintGCDetails are set,
// prints the address and current (decoded) value of the three
// Reference-specific slots — referent, next and discovered — for "obj".
// A NULL slot address prints NULL instead of dereferencing.
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if(TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr("     referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr("     next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr("     discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
219#endif
220
221template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
222  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
223  MarkSweep::adjust_pointer(referent_addr);
224  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
225  MarkSweep::adjust_pointer(next_addr);
226  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
227  MarkSweep::adjust_pointer(discovered_addr);
228  debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
229                                referent_addr, next_addr, discovered_addr);)
230}
231
232int instanceRefKlass::oop_adjust_pointers(oop obj) {
233  int size = size_helper();
234  InstanceKlass::oop_adjust_pointers(obj);
235
236  if (UseCompressedOops) {
237    specialized_oop_adjust_pointers<narrowOop>(this, obj);
238  } else {
239    specialized_oop_adjust_pointers<oop>(this, obj);
240  }
241  return size;
242}
243
// Expands to the body of an oop_oop_iterate method for one heap-oop width
// (T is oop or narrowOop).  Applies "closure" to the Reference-specific
// slots (discovered, referent, next), first offering an unmarked referent
// to the closure's ReferenceProcessor; on successful discovery the method
// returns early so the referent is not treated as a strong field.
// "contains" filters slot addresses (the always-true predicate below for
// whole-object iteration, mr.contains for the bounded _m variants).
// Note: the assert message uses adjacent-literal concatenation; the
// trailing space in "non-NULL " is required for a readable message.
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
  T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);             \
  if (closure->apply_to_weak_ref_discovered_field()) {                          \
    closure->do_oop##nv_suffix(disc_addr);                                      \
  }                                                                             \
                                                                                \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                           \
  ReferenceProcessor* rp = closure->_ref_processor;                             \
  if (!oopDesc::is_null(heap_oop)) {                                            \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                 \
    if (!referent->is_gc_marked() && (rp != NULL) &&                            \
        rp->discover_reference(obj, reference_type())) {                        \
      return size;                                                              \
    } else if (contains(referent_addr)) {                                       \
      /* treat referent as normal oop */                                        \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                                \
    }                                                                           \
  }                                                                             \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {               \
    T next_oop  = oopDesc::load_heap_oop(next_addr);                            \
    /* Treat discovered as normal oop, if ref is not "active" (next non-NULL) */\
    if (!oopDesc::is_null(next_oop) && contains(disc_addr)) {                   \
        /* i.e. ref is not "active" */                                          \
      debug_only(                                                               \
        if(TraceReferenceGC && PrintGCDetails) {                                \
          gclog_or_tty->print_cr("   Process discovered as normal "             \
                                 INTPTR_FORMAT, disc_addr);                     \
        }                                                                       \
      )                                                                         \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(disc_addr);                                    \
    }                                                                           \
  } else {                                                                      \
    /* In the case of older JDKs which do not use the discovered field for  */  \
    /* the pending list, an inactive ref (next != NULL) must always have a  */  \
    /* NULL discovered field. */                                                \
    debug_only(                                                                 \
      T next_oop = oopDesc::load_heap_oop(next_addr);                           \
      T disc_oop = oopDesc::load_heap_oop(disc_addr);                           \
      assert(oopDesc::is_null(next_oop) || oopDesc::is_null(disc_oop),          \
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL " \
                   "discovered field", (oopDesc*)obj));                         \
    )                                                                           \
  }                                                                             \
  /* treat next as normal oop */                                                \
  if (contains(next_addr)) {                                                    \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                      \
  }                                                                             \
  return size;                                                                  \

298
// Unbounded "contains" predicate for whole-object iteration: every slot
// address is accepted (the bounded variants use MemRegion::contains).
template <class T> bool contains(T *addr) { return true; }
300
301// Macro to define instanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
302// all closures.  Macros calling macros above for each oop size.
303
// Defines instanceRefKlass::oop_oop_iterate[_nv] for one closure type:
// iterate the ordinary instance fields via the superclass, then handle the
// Reference-specific slots with the unbounded "contains" predicate.  The
// SPECIALIZED_OOP_ITERATE expansion supplies the return statement.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
319
320#ifndef SERIALGC
// Same as InstanceRefKlass_OOP_OOP_ITERATE_DEFN but delegates the ordinary
// fields to the superclass's backwards iterator; the Reference-specific
// slots are still processed in the usual (forward) order.
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
336#endif // !SERIALGC
337
338
// Bounded variant: only slots whose addresses lie within MemRegion "mr"
// are passed to the closure (mr.contains is the filtering predicate).
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}
354
// Instantiate the iterate methods above for every closure type listed by
// the ALL_OOP_OOP_ITERATE_CLOSURES_1/2 macros (the backwards variants are
// only needed by the non-serial collectors).
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#ifndef SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
363
364#ifndef SERIALGC
// ParallelScavenge (young-gen copy) handling of a java.lang.ref.Reference
// instance.  The referent is offered to the scavenge ReferenceProcessor
// only when PSScavenge::should_scavenge accepts its slot; on successful
// discovery the special slots are skipped here and handled by reference
// processing later.
template <class T>
void specialized_oop_push_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->InstanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // Treat discovered as normal oop, if ref is not "active",
  // i.e. if next is non-NULL.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      if (PSScavenge::should_scavenge(discovered_addr)) {
        pm->claim_or_forward_depth(discovered_addr);
      }
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }

  // Treat next as normal oop;  next is a link in the reference queue.
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  // Push the ordinary (non-special) instance fields.
  ref->InstanceKlass::oop_push_contents(pm, obj);
}
416
417void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
418  if (UseCompressedOops) {
419    specialized_oop_push_contents<narrowOop>(this, pm, obj);
420  } else {
421    specialized_oop_push_contents<oop>(this, pm, obj);
422  }
423}
424
425template <class T>
426void specialized_oop_update_pointers(instanceRefKlass *ref,
427                                    ParCompactionManager* cm, oop obj) {
428  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
429  PSParallelCompact::adjust_pointer(referent_addr);
430  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
431  PSParallelCompact::adjust_pointer(next_addr);
432  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
433  PSParallelCompact::adjust_pointer(discovered_addr);
434  debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
435                                referent_addr, next_addr, discovered_addr);)
436}
437
438int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
439  InstanceKlass::oop_update_pointers(cm, obj);
440  if (UseCompressedOops) {
441    specialized_oop_update_pointers<narrowOop>(this, cm, obj);
442  } else {
443    specialized_oop_update_pointers<oop>(this, cm, obj);
444  }
445  return size_helper();
446}
447#endif // SERIALGC
448
// Called once, for java.lang.ref.Reference itself, to rewrite the class's
// single nonstatic oop map so generic oop iteration skips the
// GC-special fields and starts at the queue field instead.
void instanceRefKlass::update_nonstatic_oop_maps(Klass* k) {
  // Clear the nonstatic oop-map entries corresponding to referent
  // and nextPending field.  They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  InstanceKlass* ik = InstanceKlass::cast(k);

  // Check that we have the right class
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::Reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_count() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    // NOTE(review): with shared spaces the map is only verified, not
    // rewritten — presumably it was already updated when the archive was
    // dumped; confirm against the CDS dump-time path.
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->count() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->count() == count,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_count(1);
  }
}
484
485
486// Verification
487
488void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
489  InstanceKlass::oop_verify_on(obj, st);
490  // Verify referent field
491  oop referent = java_lang_ref_Reference::referent(obj);
492
493  // We should make this general to all heaps
494  GenCollectedHeap* gch = NULL;
495  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
496    gch = GenCollectedHeap::heap();
497
498  if (referent != NULL) {
499    guarantee(referent->is_oop(), "referent field heap failed");
500  }
501  // Verify next field
502  oop next = java_lang_ref_Reference::next(obj);
503  if (next != NULL) {
504    guarantee(next->is_oop(), "next field verify failed");
505    guarantee(next->is_instanceRef(), "next field verify failed");
506  }
507}
508
// Returns true iff "thread" currently holds the monitor of the
// java.lang.ref.Reference pending-list lock object; false when that lock
// object has not been resolved yet (NULL).
bool instanceRefKlass::owns_pending_list_lock(JavaThread* thread) {
  if (java_lang_ref_Reference::pending_list_lock() == NULL) return false;
  Handle h_lock(thread, java_lang_ref_Reference::pending_list_lock());
  return ObjectSynchronizer::current_thread_holds_lock(thread, h_lock);
}
514
// Acquires the java.lang.ref.Reference pending-list lock using the
// caller-supplied BasicLock.  Safe to call with a pending exception set:
// the mark preserves it across the lock operation, and any exception
// raised here is cleared before returning.
void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument

  // Create a HandleMark in case we retry a GC multiple times.
  // Each time we attempt the GC, we allocate the handle below
  // to hold the pending list lock. We want to free this handle.
  HandleMark hm;

  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}
531
// Releases the pending-list lock acquired by acquire_pending_list_lock,
// first notifying any waiters when the pending list is non-empty.  Like
// the acquire path, it tolerates (and preserves) a pending exception and
// clears any exception raised by the lock operations themselves.
void instanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument

  // Create a HandleMark in case we retry a GC multiple times.
  // Each time we attempt the GC, we allocate the handle below
  // to hold the pending list lock. We want to free this handle.
  HandleMark hm;

  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on pending lists lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}
553