// instanceRefKlass.cpp revision 113:ba764ed4b6f2
/*
 * Copyright 1997-2006 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

# include "incls/_precompiled.incl"
# include "incls/_instanceRefKlass.cpp.incl"

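// Serial mark-sweep treatment of a java.lang.ref.Reference instance.  If the
// referent is still unmarked and the ReferenceProcessor accepts the reference
// for discovery, only the ordinary instance fields are followed here; the
// referent and the next field are left to be handled during reference
// processing.  Otherwise the referent and the next field (a link in the
// pending list) are marked and pushed like normal oops.  T is narrowOop or
// oop, selected by the UseCompressedOops dispatch below.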
template <class T>
static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  oop referent = oopDesc::load_decode_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (referent != NULL) {
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  // treat next as normal oop.  next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  ref->instanceKlass::oop_follow_contents(obj);
}

void instanceRefKlass::oop_follow_contents(oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, obj);
  }
}

#ifndef SERIALGC
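// The same discovery logic, specialized for the parallel compacting
// collector: an unmarked referent may be handed to PSParallelCompact's
// ReferenceProcessor for later processing, and all marking goes through the
// ParCompactionManager.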
template <class T>
static void specialized_oop_follow_contents(instanceRefKlass* ref,
                                            ParCompactionManager* cm,
                                            oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  oop referent = oopDesc::load_decode_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (referent != NULL) {
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  // treat next as normal oop.  next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  PSParallelCompact::mark_and_push(cm, next_addr);
  ref->instanceKlass::oop_follow_contents(cm, obj);
}

void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
                                           oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, cm, obj);
  }
}
#endif // SERIALGC

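// Debug-only helper: prints the addresses of the referent, next and
// discovered fields, and the oops they currently hold, when both
// TraceReferenceGC and PrintGCDetails are enabled.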
#ifdef ASSERT
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if(TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr("     referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr("     next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr("     discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
#endif

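// Serial compaction pointer adjustment.  The referent, next and discovered
// fields are not covered by the nonstatic oop map (it is trimmed in
// update_nonstatic_oop_maps below), so they are adjusted explicitly here in
// addition to the fields handled by instanceKlass::oop_adjust_pointers.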
template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  MarkSweep::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  MarkSweep::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  MarkSweep::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int instanceRefKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  instanceKlass::oop_adjust_pointers(obj);

  if (UseCompressedOops) {
    specialized_oop_adjust_pointers<narrowOop>(this, obj);
  } else {
    specialized_oop_adjust_pointers<oop>(this, obj);
  }
  return size;
}

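// Shared body for all oop_oop_iterate variants below.  If the referent is
// unmarked and the closure's ReferenceProcessor discovers the reference, the
// iteration stops here; otherwise the referent is passed to the closure as a
// normal oop.  The next field is passed to the closure whenever it is within
// the supplied bounds.  'contains' is the bounds predicate: the unbounded
// variants use the always-true template below, the _m variants use
// MemRegion::contains.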
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
  oop referent = oopDesc::load_decode_heap_oop(referent_addr);                  \
  if (referent != NULL && contains(referent_addr)) {                            \
    ReferenceProcessor* rp = closure->_ref_processor;                           \
    if (!referent->is_gc_marked() && (rp != NULL) &&                            \
        rp->discover_reference(obj, reference_type())) {                        \
      return size;                                                              \
    } else {                                                                    \
      /* treat referent as normal oop */                                        \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                                \
    }                                                                           \
  }                                                                             \
  /* treat next as normal oop */                                                \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
  if (contains(next_addr)) {                                                    \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                      \
  }                                                                             \
  return size;


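// Trivially-true bounds predicate; used as the 'contains' argument by the
// unbounded oop_oop_iterate variants defined below.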
template <class T> bool contains(T *t) { return true; }

// Macro to define instanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.

#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}

#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}

ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_3(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_3(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)

#ifndef SERIALGC
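// Scavenge-time treatment of the referent and next fields for the parallel
// scavenger.  The breadth-first form (oop_copy_contents) and the depth-first
// form (oop_push_contents) differ only in how surviving oops are handed to
// the PSPromotionManager: claim_or_forward_breadth vs. claim_or_forward_depth.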
template <class T>
void specialized_oop_copy_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  assert(!pm->depth_first(), "invariant");
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_copy_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_breadth(referent_addr);
    }
  }
  // treat next as normal oop
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_breadth(next_addr);
  }
  ref->instanceKlass::oop_copy_contents(pm, obj);
}

void instanceRefKlass::oop_copy_contents(PSPromotionManager* pm, oop obj) {
  if (UseCompressedOops) {
    specialized_oop_copy_contents<narrowOop>(this, pm, obj);
  } else {
    specialized_oop_copy_contents<oop>(this, pm, obj);
  }
}

template <class T>
void specialized_oop_push_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  assert(pm->depth_first(), "invariant");
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // treat next as normal oop
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  ref->instanceKlass::oop_push_contents(pm, obj);
}

void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  if (UseCompressedOops) {
    specialized_oop_push_contents<narrowOop>(this, pm, obj);
  } else {
    specialized_oop_push_contents<oop>(this, pm, obj);
  }
}

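// Pointer adjustment for the parallel compacting collector.  As in the
// serial case, the referent, next and discovered fields are excluded from
// the oop map and are therefore adjusted explicitly.  The bounded form below
// only adjusts fields whose addresses lie in [beg_addr, end_addr).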
template <class T>
void specialized_oop_update_pointers(instanceRefKlass *ref,
                                    ParCompactionManager* cm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  PSParallelCompact::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  PSParallelCompact::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  PSParallelCompact::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  instanceKlass::oop_update_pointers(cm, obj);
  if (UseCompressedOops) {
    specialized_oop_update_pointers<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_update_pointers<oop>(this, cm, obj);
  }
  return size_helper();
}


template <class T> void
specialized_oop_update_pointers(ParCompactionManager* cm, oop obj,
                                HeapWord* beg_addr, HeapWord* end_addr) {
  T* p;
  T* referent_addr = p = (T*)java_lang_ref_Reference::referent_addr(obj);
  PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
  T* next_addr = p = (T*)java_lang_ref_Reference::next_addr(obj);
  PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
  T* discovered_addr = p = (T*)java_lang_ref_Reference::discovered_addr(obj);
  PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
  debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int
instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
                                      HeapWord* beg_addr, HeapWord* end_addr) {
  instanceKlass::oop_update_pointers(cm, obj, beg_addr, end_addr);
  if (UseCompressedOops) {
    specialized_oop_update_pointers<narrowOop>(cm, obj, beg_addr, end_addr);
  } else {
    specialized_oop_update_pointers<oop>(cm, obj, beg_addr, end_addr);
  }
  return size_helper();
}
#endif // SERIALGC

void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
  // Clear the nonstatic oop-map entries corresponding to referent
  // and nextPending field.  They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  instanceKlass* ik = instanceKlass::cast(k);

  // Check that we have the right class
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_size() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(int length = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->length() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->length() == length,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_length(1);
  }
}


// Verification

void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  instanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
    gch = GenCollectedHeap::heap();

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field heap failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the referent
      // field is not part of the oop mask and therefore skipped by the
      // regular verify code.
      if (UseCompressedOops) {
        narrowOop* referent_addr = (narrowOop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      } else {
        oop* referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      }
    }
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    guarantee(next->is_instanceRef(), "next field verify failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the next field is
      // not part of the oop mask and therefore skipped by the regular
      // verify code.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      }
    }
  }
}

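// The pending list lock is the Java-level lock object of
// java.lang.ref.Reference.  The VM holds it around updates to the pending
// reference list so that Java code synchronizing on it (notably the
// ReferenceHandler thread) does not observe the list mid-update.  Both
// helpers may be entered with a pending exception set, hence the
// PRESERVE_EXCEPTION_MARK.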
void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}

void instanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on the pending list lock if there is any pending reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}