jvmtiTagMap.cpp revision 6761:739468857ffb
1/*
2 * Copyright (c) 2003, 2014, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "classfile/symbolTable.hpp"
27#include "classfile/systemDictionary.hpp"
28#include "classfile/vmSymbols.hpp"
29#include "code/codeCache.hpp"
30#include "jvmtifiles/jvmtiEnv.hpp"
31#include "oops/instanceMirrorKlass.hpp"
32#include "oops/objArrayKlass.hpp"
33#include "oops/oop.inline2.hpp"
34#include "prims/jvmtiEventController.hpp"
35#include "prims/jvmtiEventController.inline.hpp"
36#include "prims/jvmtiExport.hpp"
37#include "prims/jvmtiImpl.hpp"
38#include "prims/jvmtiTagMap.hpp"
39#include "runtime/biasedLocking.hpp"
40#include "runtime/javaCalls.hpp"
41#include "runtime/jniHandles.hpp"
42#include "runtime/mutex.hpp"
43#include "runtime/mutexLocker.hpp"
44#include "runtime/reflectionUtils.hpp"
45#include "runtime/vframe.hpp"
46#include "runtime/vmThread.hpp"
47#include "runtime/vm_operations.hpp"
48#include "services/serviceUtil.hpp"
49#include "utilities/macros.hpp"
50#if INCLUDE_ALL_GCS
51#include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
52#endif // INCLUDE_ALL_GCS
53
54// JvmtiTagHashmapEntry
55//
56// Each entry encapsulates a reference to the tagged object
57// and the tag value. In addition an entry includes a next pointer which
58// is used to chain entries together.
59
60class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
61 private:
62  friend class JvmtiTagMap;
63
64  oop _object;                          // tagged object
65  jlong _tag;                           // the tag
66  JvmtiTagHashmapEntry* _next;          // next on the list
67
68  inline void init(oop object, jlong tag) {
69    _object = object;
70    _tag = tag;
71    _next = NULL;
72  }
73
74  // constructor
75  JvmtiTagHashmapEntry(oop object, jlong tag)         { init(object, tag); }
76
77 public:
78
79  // accessor methods
80  inline oop object() const                           { return _object; }
81  inline oop* object_addr()                           { return &_object; }
82  inline jlong tag() const                            { return _tag; }
83
84  inline void set_tag(jlong tag) {
85    assert(tag != 0, "can't be zero");
86    _tag = tag;
87  }
88
89  inline JvmtiTagHashmapEntry* next() const             { return _next; }
90  inline void set_next(JvmtiTagHashmapEntry* next)      { _next = next; }
91};
92
93
94// JvmtiTagHashmap
95//
96// A hashmap is essentially a table of pointers to entries. Entries
97// are hashed to a location, or position in the table, and then
98// chained from that location. The "key" for hashing is the address of
99// the object (oop). The "value" is the tag value.
100//
101// A hashmap maintains a count of the number of entries in the hashmap
102// and resizes if the number of entries exceeds a given threshold.
103// The threshold is specified as a percentage of the size - for
104// example a threshold of 0.75 will trigger the hashmap to resize
105// if the number of entries is >75% of table size.
106//
107// A hashmap provides functions for adding, removing, and finding
108// entries. It also provides a function to iterate over all entries
109// in the hashmap.
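//
// Worked example (illustrative, using the defaults in init() below): the
// initial table size is _sizes[0] == 4801 and the default load factor is
// 4.0, giving a resize threshold of (int)(4.0f * 4801) == 19204. Once the
// entry count exceeds 19204 the table is resized to the next size in
// _sizes (76831) and the threshold is recomputed.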
110
111class JvmtiTagHashmap : public CHeapObj<mtInternal> {
112 private:
113  friend class JvmtiTagMap;
114
115  enum {
116    small_trace_threshold  = 10000,                  // threshold for tracing
117    medium_trace_threshold = 100000,
118    large_trace_threshold  = 1000000,
119    initial_trace_threshold = small_trace_threshold
120  };
121
122  static int _sizes[];                  // array of possible hashmap sizes
123  int _size;                            // actual size of the table
124  int _size_index;                      // index into size table
125
126  int _entry_count;                     // number of entries in the hashmap
127
128  float _load_factor;                   // load factor as a % of the size
129  int _resize_threshold;                // computed threshold to trigger resizing.
130  bool _resizing_enabled;               // indicates if hashmap can resize
131
132  int _trace_threshold;                 // threshold for trace messages
133
134  JvmtiTagHashmapEntry** _table;        // the table of entries.
135
136  // private accessors
137  int resize_threshold() const                  { return _resize_threshold; }
138  int trace_threshold() const                   { return _trace_threshold; }
139
140  // initialize the hashmap
141  void init(int size_index=0, float load_factor=4.0f) {
142    int initial_size =  _sizes[size_index];
143    _size_index = size_index;
144    _size = initial_size;
145    _entry_count = 0;
146    if (TraceJVMTIObjectTagging) {
147      _trace_threshold = initial_trace_threshold;
148    } else {
149      _trace_threshold = -1;
150    }
151    _load_factor = load_factor;
152    _resize_threshold = (int)(_load_factor * _size);
153    _resizing_enabled = true;
154    size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
155    _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
156    if (_table == NULL) {
157      vm_exit_out_of_memory(s, OOM_MALLOC_ERROR,
158        "unable to allocate initial hashtable for jvmti object tags");
159    }
160    for (int i=0; i<initial_size; i++) {
161      _table[i] = NULL;
162    }
163  }
164
165  // hash a given key (oop) with the specified size
166  static unsigned int hash(oop key, int size) {
167    // shift right to get better distribution (as these bits will be zero
168    // with aligned addresses)
169    unsigned int addr = (unsigned int)(cast_from_oop<intptr_t>(key));
170#ifdef _LP64
171    return (addr >> 3) % size;
172#else
173    return (addr >> 2) % size;
174#endif
175  }
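
  // Worked example (illustrative): with 8-byte aligned objects on a 64-bit VM
  // the low three bits of an oop's address are always zero, so they are
  // shifted out before taking the remainder; an oop whose (truncated 32-bit)
  // address is A maps to bucket (A >> 3) % size, i.e. a bucket in [0, 4800]
  // for the initial table size of 4801.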
176
177  // hash a given key (oop)
178  unsigned int hash(oop key) {
179    return hash(key, _size);
180  }
181
182  // resize the hashmap - allocates a larger table and re-hashes
183  // all entries into the new table.
184  void resize() {
185    int new_size_index = _size_index+1;
186    int new_size = _sizes[new_size_index];
187    if (new_size < 0) {
188      // hashmap already at maximum capacity
189      return;
190    }
191
192    // allocate new table
193    size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
194    JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
195    if (new_table == NULL) {
196      warning("unable to allocate larger hashtable for jvmti object tags");
197      set_resizing_enabled(false);
198      return;
199    }
200
201    // initialize new table
202    int i;
203    for (i=0; i<new_size; i++) {
204      new_table[i] = NULL;
205    }
206
207    // rehash all entries into the new table
208    for (i=0; i<_size; i++) {
209      JvmtiTagHashmapEntry* entry = _table[i];
210      while (entry != NULL) {
211        JvmtiTagHashmapEntry* next = entry->next();
212        oop key = entry->object();
213        assert(key != NULL, "jni weak reference cleared!!");
214        unsigned int h = hash(key, new_size);
215        JvmtiTagHashmapEntry* anchor = new_table[h];
216        if (anchor == NULL) {
217          new_table[h] = entry;
218          entry->set_next(NULL);
219        } else {
220          entry->set_next(anchor);
221          new_table[h] = entry;
222        }
223        entry = next;
224      }
225    }
226
227    // free old table and update settings.
228    os::free((void*)_table);
229    _table = new_table;
230    _size_index = new_size_index;
231    _size = new_size;
232
233    // compute new resize threshold
234    _resize_threshold = (int)(_load_factor * _size);
235  }
236
237
238  // internal remove function - remove an entry at a given position in the
239  // table.
240  inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
241    assert(pos >= 0 && pos < _size, "out of range");
242    if (prev == NULL) {
243      _table[pos] = entry->next();
244    } else {
245      prev->set_next(entry->next());
246    }
247    assert(_entry_count > 0, "checking");
248    _entry_count--;
249  }
250
251  // resizing switch
252  bool is_resizing_enabled() const          { return _resizing_enabled; }
253  void set_resizing_enabled(bool enable)    { _resizing_enabled = enable; }
254
255  // debugging
256  void print_memory_usage();
257  void compute_next_trace_threshold();
258
259 public:
260
261  // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
262  // The preferred size is rounded up to the nearest actual size (capped at the maximum).
263  JvmtiTagHashmap(int size, float load_factor=0.0f) {
264    int i=0;
265    while (_sizes[i] < size) {
266      if (_sizes[i] < 0) {
267        assert(i > 0, "sanity check");
268        i--;
269        break;
270      }
271      i++;
272    }
273
274    // if a load factor is specified then use it, otherwise use default
275    if (load_factor > 0.01f) {
276      init(i, load_factor);
277    } else {
278      init(i);
279    }
280  }
281
282  // create a JvmtiTagHashmap with default settings
283  JvmtiTagHashmap() {
284    init();
285  }
286
287  // release table when JvmtiTagHashmap destroyed
288  ~JvmtiTagHashmap() {
289    if (_table != NULL) {
290      os::free((void*)_table);
291      _table = NULL;
292    }
293  }
294
295  // accessors
296  int size() const                              { return _size; }
297  JvmtiTagHashmapEntry** table() const          { return _table; }
298  int entry_count() const                       { return _entry_count; }
299
300  // find an entry in the hashmap, returns NULL if not found.
301  inline JvmtiTagHashmapEntry* find(oop key) {
302    unsigned int h = hash(key);
303    JvmtiTagHashmapEntry* entry = _table[h];
304    while (entry != NULL) {
305      if (entry->object() == key) {
306         return entry;
307      }
308      entry = entry->next();
309    }
310    return NULL;
311  }
312
313
314  // add a new entry to hashmap
315  inline void add(oop key, JvmtiTagHashmapEntry* entry) {
316    assert(key != NULL, "checking");
317    assert(find(key) == NULL, "duplicate detected");
318    unsigned int h = hash(key);
319    JvmtiTagHashmapEntry* anchor = _table[h];
320    if (anchor == NULL) {
321      _table[h] = entry;
322      entry->set_next(NULL);
323    } else {
324      entry->set_next(anchor);
325      _table[h] = entry;
326    }
327
328    _entry_count++;
329    if (trace_threshold() > 0 && entry_count() >= trace_threshold()) {
330      assert(TraceJVMTIObjectTagging, "should only get here when tracing");
331      print_memory_usage();
332      compute_next_trace_threshold();
333    }
334
335    // if the number of entries exceeds the threshold then resize
336    if (entry_count() > resize_threshold() && is_resizing_enabled()) {
337      resize();
338    }
339  }
340
341  // remove an entry with the given key.
342  inline JvmtiTagHashmapEntry* remove(oop key) {
343    unsigned int h = hash(key);
344    JvmtiTagHashmapEntry* entry = _table[h];
345    JvmtiTagHashmapEntry* prev = NULL;
346    while (entry != NULL) {
347      if (key == entry->object()) {
348        break;
349      }
350      prev = entry;
351      entry = entry->next();
352    }
353    if (entry != NULL) {
354      remove(prev, h, entry);
355    }
356    return entry;
357  }
358
359  // iterate over all entries in the hashmap
360  void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
361};
362
363// possible hashmap sizes - odd primes that roughly double in size.
364// To avoid excessive resizing the odd primes from 4801-76831 and
365// 76831-307261 have been removed. The list must be terminated by -1.
366int JvmtiTagHashmap::_sizes[] =  { 4801, 76831, 307261, 614563, 1228891,
367    2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
368
369
370// A supporting class for iterating over all entries in the hashmap
371class JvmtiTagHashmapEntryClosure {
372 public:
373  virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
374};
375
376
377// iterate over all entries in the hashmap
378void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
379  for (int i=0; i<_size; i++) {
380    JvmtiTagHashmapEntry* entry = _table[i];
381    JvmtiTagHashmapEntry* prev = NULL;
382    while (entry != NULL) {
383      // obtain the next entry before invoking do_entry - this is
384      // necessary because do_entry may remove the entry from the
385      // hashmap.
386      JvmtiTagHashmapEntry* next = entry->next();
387      closure->do_entry(entry);
388      entry = next;
389     }
390  }
391}
392
393// debugging
394void JvmtiTagHashmap::print_memory_usage() {
395  intptr_t p = (intptr_t)this;
396  tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
397
398  // table + entries in KB
399  int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
400    entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
401
402  int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
403  tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
404    entry_count(), hashmap_usage, weak_globals_usage);
405}
406
407// compute threshold for the next trace message
408void JvmtiTagHashmap::compute_next_trace_threshold() {
409  if (trace_threshold() < medium_trace_threshold) {
410    _trace_threshold += small_trace_threshold;
411  } else {
412    if (trace_threshold() < large_trace_threshold) {
413      _trace_threshold += medium_trace_threshold;
414    } else {
415      _trace_threshold += large_trace_threshold;
416    }
417  }
418}
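
// Illustrative progression using the thresholds defined above: the trace
// threshold advances 10000, 20000, ..., 100000 (steps of
// small_trace_threshold), then 200000, ..., 1000000 (steps of
// medium_trace_threshold), and thereafter in steps of large_trace_threshold.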
419
420// create a JvmtiTagMap
421JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
422  _env(env),
423  _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
424  _free_entries(NULL),
425  _free_entries_count(0)
426{
427  assert(JvmtiThreadState_lock->is_locked(), "sanity check");
428  assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");
429
430  _hashmap = new JvmtiTagHashmap();
431
432  // finally add us to the environment
433  ((JvmtiEnvBase *)env)->set_tag_map(this);
434}
435
436
437// destroy a JvmtiTagMap
438JvmtiTagMap::~JvmtiTagMap() {
439
440  // no lock acquired as we assume the enclosing environment is
441  // also being destroyed.
442  ((JvmtiEnvBase *)_env)->set_tag_map(NULL);
443
444  JvmtiTagHashmapEntry** table = _hashmap->table();
445  for (int j = 0; j < _hashmap->size(); j++) {
446    JvmtiTagHashmapEntry* entry = table[j];
447    while (entry != NULL) {
448      JvmtiTagHashmapEntry* next = entry->next();
449      delete entry;
450      entry = next;
451    }
452  }
453
454  // finally destroy the hashmap
455  delete _hashmap;
456  _hashmap = NULL;
457
458  // remove any entries on the free list
459  JvmtiTagHashmapEntry* entry = _free_entries;
460  while (entry != NULL) {
461    JvmtiTagHashmapEntry* next = entry->next();
462    delete entry;
463    entry = next;
464  }
465  _free_entries = NULL;
466}
467
468// create a hashmap entry
469// - if there's an entry on the (per-environment) free list then this
470// is returned. Otherwise a new entry is allocated.
471JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) {
472  assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
473  JvmtiTagHashmapEntry* entry;
474  if (_free_entries == NULL) {
475    entry = new JvmtiTagHashmapEntry(ref, tag);
476  } else {
477    assert(_free_entries_count > 0, "mismatched _free_entries_count");
478    _free_entries_count--;
479    entry = _free_entries;
480    _free_entries = entry->next();
481    entry->init(ref, tag);
482  }
483  return entry;
484}
485
486// destroy an entry by returning it to the free list
487void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
488  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
489  // limit the size of the free list
490  if (_free_entries_count >= max_free_entries) {
491    delete entry;
492  } else {
493    entry->set_next(_free_entries);
494    _free_entries = entry;
495    _free_entries_count++;
496  }
497}
498
499// returns the tag map for the given environment. If the tag map
500// doesn't exist then it is created.
501JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
502  JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map();
503  if (tag_map == NULL) {
504    MutexLocker mu(JvmtiThreadState_lock);
505    tag_map = ((JvmtiEnvBase*)env)->tag_map();
506    if (tag_map == NULL) {
507      tag_map = new JvmtiTagMap(env);
508    }
509  } else {
510    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
511  }
512  return tag_map;
513}
514
515// iterate over all entries in the tag map.
516void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
517  hashmap()->entry_iterate(closure);
518}
519
520// returns true if the hashmaps are empty
521bool JvmtiTagMap::is_empty() {
522  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
523  return hashmap()->entry_count() == 0;
524}
525
526
527// Return the tag value for an object, or 0 if the object is
528// not tagged
529//
530static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
531  JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o);
532  if (entry == NULL) {
533    return 0;
534  } else {
535    return entry->tag();
536  }
537}
538
539
540// A CallbackWrapper is a support class for querying and tagging an object
541// around a callback to a profiler. The constructor does pre-callback
542// work to get the tag value, klass tag value, ... and the destructor
543// does the post-callback work of tagging or untagging the object.
544//
545// {
546//   CallbackWrapper wrapper(tag_map, o);
547//
548//   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
549//
550// } // wrapper goes out of scope here which results in the destructor
551//      checking to see if the object has been tagged, untagged, or the
552//      tag value has changed.
553//
554class CallbackWrapper : public StackObj {
555 private:
556  JvmtiTagMap* _tag_map;
557  JvmtiTagHashmap* _hashmap;
558  JvmtiTagHashmapEntry* _entry;
559  oop _o;
560  jlong _obj_size;
561  jlong _obj_tag;
562  jlong _klass_tag;
563
564 protected:
565  JvmtiTagMap* tag_map() const      { return _tag_map; }
566
567  // invoked post-callback to tag, untag, or update the tag of an object
568  void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
569                                       JvmtiTagHashmapEntry* entry, jlong obj_tag);
570 public:
571  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
572    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
573           "MT unsafe or must be VM thread");
574
575    // object to tag
576    _o = o;
577
578    // object size
579    _obj_size = (jlong)_o->size() * wordSize;
580
581    // record the context
582    _tag_map = tag_map;
583    _hashmap = tag_map->hashmap();
584    _entry = _hashmap->find(_o);
585
586    // get object tag
587    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();
588
589    // get the class and the class's tag value
590    assert(SystemDictionary::Class_klass()->oop_is_instanceMirror(), "Is not?");
591
592    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
593  }
594
595  ~CallbackWrapper() {
596    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
597  }
598
599  inline jlong* obj_tag_p()                     { return &_obj_tag; }
600  inline jlong obj_size() const                 { return _obj_size; }
601  inline jlong obj_tag() const                  { return _obj_tag; }
602  inline jlong klass_tag() const                { return _klass_tag; }
603};
604
605
606
607// invoked post-callback to tag, untag, or update the tag of an object
608void inline CallbackWrapper::post_callback_tag_update(oop o,
609                                                      JvmtiTagHashmap* hashmap,
610                                                      JvmtiTagHashmapEntry* entry,
611                                                      jlong obj_tag) {
612  if (entry == NULL) {
613    if (obj_tag != 0) {
614      // callback has tagged the object
615      assert(Thread::current()->is_VM_thread(), "must be VMThread");
616      entry = tag_map()->create_entry(o, obj_tag);
617      hashmap->add(o, entry);
618    }
619  } else {
620    // object was previously tagged - the callback may have untagged
621    // the object or changed the tag value
622    if (obj_tag == 0) {
623
624      JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
625      assert(entry_removed == entry, "checking");
626      tag_map()->destroy_entry(entry);
627
628    } else {
629      if (obj_tag != entry->tag()) {
630         entry->set_tag(obj_tag);
631      }
632    }
633  }
634}
635
636// An extended CallbackWrapper used when reporting an object reference
637// to the agent.
638//
639// {
640//   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
641//
642//   (*callback)(wrapper.klass_tag(),
643//               wrapper.obj_size(),
644//               wrapper.obj_tag_p(),
645//               wrapper.referrer_tag_p(), ...)
646//
647// } // wrapper goes out of scope here which results in the destructor
648//      checking to see if the referrer object has been tagged, untagged,
649//      or the tag value has changed.
650//
651class TwoOopCallbackWrapper : public CallbackWrapper {
652 private:
653  bool _is_reference_to_self;
654  JvmtiTagHashmap* _referrer_hashmap;
655  JvmtiTagHashmapEntry* _referrer_entry;
656  oop _referrer;
657  jlong _referrer_obj_tag;
658  jlong _referrer_klass_tag;
659  jlong* _referrer_tag_p;
660
661  bool is_reference_to_self() const             { return _is_reference_to_self; }
662
663 public:
664  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
665    CallbackWrapper(tag_map, o)
666  {
667    // self reference needs to be handled in a special way
668    _is_reference_to_self = (referrer == o);
669
670    if (_is_reference_to_self) {
671      _referrer_klass_tag = klass_tag();
672      _referrer_tag_p = obj_tag_p();
673    } else {
674      _referrer = referrer;
675      // record the context
676      _referrer_hashmap = tag_map->hashmap();
677      _referrer_entry = _referrer_hashmap->find(_referrer);
678
679      // get object tag
680      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
681      _referrer_tag_p = &_referrer_obj_tag;
682
683      // get referrer class tag.
684      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
685    }
686  }
687
688  ~TwoOopCallbackWrapper() {
689    if (!is_reference_to_self()) {
690      post_callback_tag_update(_referrer,
691                               _referrer_hashmap,
692                               _referrer_entry,
693                               _referrer_obj_tag);
694    }
695  }
696
697  // address of referrer tag
698  // (for a self reference this will return the same thing as obj_tag_p())
699  inline jlong* referrer_tag_p()        { return _referrer_tag_p; }
700
701  // referrer's class tag
702  inline jlong referrer_klass_tag()     { return _referrer_klass_tag; }
703};
704
705// tag an object
706//
707// This function is performance critical. If many threads attempt to tag objects
708// around the same time then it's possible that the Mutex associated with the
709// tag map will be a hot lock.
710void JvmtiTagMap::set_tag(jobject object, jlong tag) {
711  MutexLocker ml(lock());
712
713  // resolve the object
714  oop o = JNIHandles::resolve_non_null(object);
715
716  // see if the object is already tagged
717  JvmtiTagHashmap* hashmap = _hashmap;
718  JvmtiTagHashmapEntry* entry = hashmap->find(o);
719
720  // if the object is not already tagged then we tag it
721  if (entry == NULL) {
722    if (tag != 0) {
723      entry = create_entry(o, tag);
724      hashmap->add(o, entry);
725    } else {
726      // no-op
727    }
728  } else {
729    // if the object is already tagged then we either update
730    // the tag (if a new tag value has been provided)
731    // or remove the object if the new tag value is 0.
732    if (tag == 0) {
733      hashmap->remove(o);
734      destroy_entry(entry);
735    } else {
736      entry->set_tag(tag);
737    }
738  }
739}
740
741// get the tag for an object
742jlong JvmtiTagMap::get_tag(jobject object) {
743  MutexLocker ml(lock());
744
745  // resolve the object
746  oop o = JNIHandles::resolve_non_null(object);
747
748  return tag_for(this, o);
749}
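
// Agent-side sketch (illustrative; 'jvmti' is an assumed jvmtiEnv* held by an
// agent): the JVMTI SetTag/GetTag entry points route to the functions above:
//
//   jvmti->SetTag(obj, 42);      // tags obj - creates or updates its entry
//   jvmti->SetTag(obj, 0);       // untags obj - removes its entry
//   jvmti->GetTag(obj, &tag);    // tag is 0 if obj is not tagged
//
// Each call acquires the tag map's Mutex, so heavy concurrent tagging can
// make this lock hot (see the comment above set_tag).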
750
751
752// Helper class used to describe the static or instance fields of a class.
753// For each field it holds the field index (as defined by the JVMTI specification),
754// the field type, and the offset.
755
756class ClassFieldDescriptor: public CHeapObj<mtInternal> {
757 private:
758  int _field_index;
759  int _field_offset;
760  char _field_type;
761 public:
762  ClassFieldDescriptor(int index, char type, int offset) :
763    _field_index(index), _field_type(type), _field_offset(offset) {
764  }
765  int field_index()  const  { return _field_index; }
766  char field_type()  const  { return _field_type; }
767  int field_offset() const  { return _field_offset; }
768};
769
770class ClassFieldMap: public CHeapObj<mtInternal> {
771 private:
772  enum {
773    initial_field_count = 5
774  };
775
776  // list of field descriptors
777  GrowableArray<ClassFieldDescriptor*>* _fields;
778
779  // constructor
780  ClassFieldMap();
781
782  // add a field
783  void add(int index, char type, int offset);
784
785  // returns the field count for the given class
786  static int compute_field_count(instanceKlassHandle ikh);
787
788 public:
789  ~ClassFieldMap();
790
791  // access
792  int field_count()                     { return _fields->length(); }
793  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }
794
795  // functions to create maps of static or instance fields
796  static ClassFieldMap* create_map_of_static_fields(Klass* k);
797  static ClassFieldMap* create_map_of_instance_fields(oop obj);
798};
799
800ClassFieldMap::ClassFieldMap() {
801  _fields = new (ResourceObj::C_HEAP, mtInternal)
802    GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
803}
804
805ClassFieldMap::~ClassFieldMap() {
806  for (int i=0; i<_fields->length(); i++) {
807    delete _fields->at(i);
808  }
809  delete _fields;
810}
811
812void ClassFieldMap::add(int index, char type, int offset) {
813  ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
814  _fields->append(field);
815}
816
817// Returns a heap allocated ClassFieldMap to describe the static fields
818// of the given class.
819//
820ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
821  HandleMark hm;
822  instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), k);
823
824  // create the field map
825  ClassFieldMap* field_map = new ClassFieldMap();
826
827  FilteredFieldStream f(ikh, false, false);
828  int max_field_index = f.field_count()-1;
829
830  int index = 0;
831  for (FilteredFieldStream fld(ikh, true, true); !fld.eos(); fld.next(), index++) {
832    // ignore instance fields
833    if (!fld.access_flags().is_static()) {
834      continue;
835    }
836    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
837  }
838  return field_map;
839}
840
841// Returns a heap allocated ClassFieldMap to describe the instance fields
842// of the given class. All instance fields are included (this means public
843// and private fields declared in superclasses and superinterfaces too).
844//
845ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
846  HandleMark hm;
847  instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), obj->klass());
848
849  // create the field map
850  ClassFieldMap* field_map = new ClassFieldMap();
851
852  FilteredFieldStream f(ikh, false, false);
853
854  int max_field_index = f.field_count()-1;
855
856  int index = 0;
857  for (FilteredFieldStream fld(ikh, false, false); !fld.eos(); fld.next(), index++) {
858    // ignore static fields
859    if (fld.access_flags().is_static()) {
860      continue;
861    }
862    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
863  }
864
865  return field_map;
866}
867
868// Helper class used to cache a ClassFieldMap for the instance fields of
869// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
870// heap iteration and avoid creating a field map for each object in the heap
871// (only need to create the map when the first instance of a class is encountered).
872//
873class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
874 private:
875   enum {
876     initial_class_count = 200
877   };
878  ClassFieldMap* _field_map;
879
880  ClassFieldMap* field_map() const          { return _field_map; }
881
882  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
883  ~JvmtiCachedClassFieldMap();
884
885  static GrowableArray<InstanceKlass*>* _class_list;
886  static void add_to_class_list(InstanceKlass* ik);
887
888 public:
889  // returns the field map for a given object (returning the map cached
890  // by the InstanceKlass if possible)
891  static ClassFieldMap* get_map_of_instance_fields(oop obj);
892
893  // removes the field map from all instanceKlasses - should be
894  // called before VM operation completes
895  static void clear_cache();
896
897  // returns the number of ClassFieldMaps cached by instanceKlasses
898  static int cached_field_map_count();
899};
900
901GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
902
903JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
904  _field_map = field_map;
905}
906
907JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
908  if (_field_map != NULL) {
909    delete _field_map;
910  }
911}
912
913// Marker class to ensure that the class field map cache is only used in a defined
914// scope.
915class ClassFieldMapCacheMark : public StackObj {
916 private:
917   static bool _is_active;
918 public:
919   ClassFieldMapCacheMark() {
920     assert(Thread::current()->is_VM_thread(), "must be VMThread");
921     assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
922     assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
923     _is_active = true;
924   }
925   ~ClassFieldMapCacheMark() {
926     JvmtiCachedClassFieldMap::clear_cache();
927     _is_active = false;
928   }
929   static bool is_active() { return _is_active; }
930};
931
932bool ClassFieldMapCacheMark::_is_active;
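
// Usage sketch (illustrative) showing how the two classes above combine
// during a heap walk on the VM thread:
//
//   {
//     ClassFieldMapCacheMark cm;    // enables per-InstanceKlass caching
//     // for each object visited:
//     ClassFieldMap* map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
//     // ... report primitive fields using map ...
//   } // leaving the scope clears the cached field maps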
933
934
935// record that the given InstanceKlass is caching a field map
936void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
937  if (_class_list == NULL) {
938    _class_list = new (ResourceObj::C_HEAP, mtInternal)
939      GrowableArray<InstanceKlass*>(initial_class_count, true);
940  }
941  _class_list->push(ik);
942}
943
944// returns the instance field map for the given object
945// (returns field map cached by the InstanceKlass if possible)
946ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
947  assert(Thread::current()->is_VM_thread(), "must be VMThread");
948  assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
949
950  Klass* k = obj->klass();
951  InstanceKlass* ik = InstanceKlass::cast(k);
952
953  // return cached map if possible
954  JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
955  if (cached_map != NULL) {
956    assert(cached_map->field_map() != NULL, "missing field list");
957    return cached_map->field_map();
958  } else {
959    ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
960    cached_map = new JvmtiCachedClassFieldMap(field_map);
961    ik->set_jvmti_cached_class_field_map(cached_map);
962    add_to_class_list(ik);
963    return field_map;
964  }
965}
966
967// remove the field maps cached in all instanceKlasses
968void JvmtiCachedClassFieldMap::clear_cache() {
969  assert(Thread::current()->is_VM_thread(), "must be VMThread");
970  if (_class_list != NULL) {
971    for (int i = 0; i < _class_list->length(); i++) {
972      InstanceKlass* ik = _class_list->at(i);
973      JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
974      assert(cached_map != NULL, "should not be NULL");
975      ik->set_jvmti_cached_class_field_map(NULL);
976      delete cached_map;  // deletes the encapsulated field map
977    }
978    delete _class_list;
979    _class_list = NULL;
980  }
981}
982
983// returns the number of ClassFieldMaps cached by instanceKlasses
984int JvmtiCachedClassFieldMap::cached_field_map_count() {
985  return (_class_list == NULL) ? 0 : _class_list->length();
986}
987
988// helper function to indicate if an object is filtered by its tag or class tag
989static inline bool is_filtered_by_heap_filter(jlong obj_tag,
990                                              jlong klass_tag,
991                                              int heap_filter) {
992  // apply the heap filter
993  if (obj_tag != 0) {
994    // filter out tagged objects
995    if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
996  } else {
997    // filter out untagged objects
998    if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
999  }
1000  if (klass_tag != 0) {
1001    // filter out objects with tagged classes
1002    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
1003  } else {
1004    // filter out objects with untagged classes.
1005    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
1006  }
1007  return false;
1008}
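
// Example (illustrative): a heap_filter of
// (JVMTI_HEAP_FILTER_UNTAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED) filters out
// untagged objects and objects whose class is untagged, so only tagged
// instances of tagged classes are reported to the agent.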
1009
1010// helper function to indicate if an object is filtered by a klass filter
1011static inline bool is_filtered_by_klass_filter(oop obj, KlassHandle klass_filter) {
1012  if (!klass_filter.is_null()) {
1013    if (obj->klass() != klass_filter()) {
1014      return true;
1015    }
1016  }
1017  return false;
1018}
1019
1020// helper function to tell if a field is a primitive field or not
1021static inline bool is_primitive_field_type(char type) {
1022  return (type != 'L' && type != '[');
1023}
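
// Example (illustrative): a field signature starting with 'I' (int) or 'D'
// (double) denotes a primitive field; signatures starting with 'L' (for
// example "Ljava/lang/String;") or '[' (arrays) denote reference fields and
// are skipped by the primitive field callbacks below.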
1024
1025// helper function to copy the value from location addr to jvalue.
1026static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
1027  switch (value_type) {
1028    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
1029    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
1030    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
1031    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
1032    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
1033    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
1034    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
1035    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
1036    default: ShouldNotReachHere();
1037  }
1038}
1039
1040// helper function to invoke string primitive value callback
1041// returns visit control flags
1042static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
1043                                         CallbackWrapper* wrapper,
1044                                         oop str,
1045                                         void* user_data)
1046{
1047  assert(str->klass() == SystemDictionary::String_klass(), "not a string");
1048
1049  // get the string value and length
1050  // (string value may be offset from the base)
1051  int s_len = java_lang_String::length(str);
1052  typeArrayOop s_value = java_lang_String::value(str);
1053  int s_offset = java_lang_String::offset(str);
1054  jchar* value;
1055  if (s_len > 0) {
1056    value = s_value->char_at_addr(s_offset);
1057  } else {
1058    value = (jchar*) s_value->base(T_CHAR);
1059  }
1060
1061  // invoke the callback
1062  return (*cb)(wrapper->klass_tag(),
1063               wrapper->obj_size(),
1064               wrapper->obj_tag_p(),
1065               value,
1066               (jint)s_len,
1067               user_data);
1068}
1069
1070// helper function to invoke array primitive value callback
1071// returns visit control flags
1072static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
1073                                                  CallbackWrapper* wrapper,
1074                                                  oop obj,
1075                                                  void* user_data)
1076{
1077  assert(obj->is_typeArray(), "not a primitive array");
1078
1079  // get base address of first element
1080  typeArrayOop array = typeArrayOop(obj);
1081  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
1082  void* elements = array->base(type);
1083
1084  // jvmtiPrimitiveType is defined so this mapping is always correct
1085  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);
1086
1087  return (*cb)(wrapper->klass_tag(),
1088               wrapper->obj_size(),
1089               wrapper->obj_tag_p(),
1090               (jint)array->length(),
1091               elem_type,
1092               elements,
1093               user_data);
1094}
1095
1096// helper function to invoke the primitive field callback for all static fields
1097// of a given class
1098static jint invoke_primitive_field_callback_for_static_fields
1099  (CallbackWrapper* wrapper,
1100   oop obj,
1101   jvmtiPrimitiveFieldCallback cb,
1102   void* user_data)
1103{
1104  // for static fields only the index will be set
1105  static jvmtiHeapReferenceInfo reference_info = { 0 };
1106
1107  assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
1108  if (java_lang_Class::is_primitive(obj)) {
1109    return 0;
1110  }
1111  Klass* klass = java_lang_Class::as_Klass(obj);
1112
1113  // ignore classes for object and type arrays
1114  if (!klass->oop_is_instance()) {
1115    return 0;
1116  }
1117
1118  // ignore classes which aren't linked yet
1119  InstanceKlass* ik = InstanceKlass::cast(klass);
1120  if (!ik->is_linked()) {
1121    return 0;
1122  }
1123
1124  // get the field map
1125  ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
1126
1127  // invoke the callback for each static primitive field
1128  for (int i=0; i<field_map->field_count(); i++) {
1129    ClassFieldDescriptor* field = field_map->field_at(i);
1130
1131    // ignore non-primitive fields
1132    char type = field->field_type();
1133    if (!is_primitive_field_type(type)) {
1134      continue;
1135    }
1136    // one-to-one mapping
1137    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1138
1139    // get offset and field value
1140    int offset = field->field_offset();
1141    address addr = (address)klass->java_mirror() + offset;
1142    jvalue value;
1143    copy_to_jvalue(&value, addr, value_type);
1144
1145    // field index
1146    reference_info.field.index = field->field_index();
1147
1148    // invoke the callback
1149    jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
1150                     &reference_info,
1151                     wrapper->klass_tag(),
1152                     wrapper->obj_tag_p(),
1153                     value,
1154                     value_type,
1155                     user_data);
1156    if (res & JVMTI_VISIT_ABORT) {
1157      delete field_map;
1158      return res;
1159    }
1160  }
1161
1162  delete field_map;
1163  return 0;
1164}
1165
1166// helper function to invoke the primitive field callback for all instance fields
1167// of a given object
1168static jint invoke_primitive_field_callback_for_instance_fields(
1169  CallbackWrapper* wrapper,
1170  oop obj,
1171  jvmtiPrimitiveFieldCallback cb,
1172  void* user_data)
1173{
1174  // for instance fields only the index will be set
1175  static jvmtiHeapReferenceInfo reference_info = { 0 };
1176
1177  // get the map of the instance fields
1178  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
1179
1180  // invoke the callback for each instance primitive field
1181  for (int i=0; i<fields->field_count(); i++) {
1182    ClassFieldDescriptor* field = fields->field_at(i);
1183
1184    // ignore non-primitive fields
1185    char type = field->field_type();
1186    if (!is_primitive_field_type(type)) {
1187      continue;
1188    }
1189    // one-to-one mapping
1190    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1191
1192    // get offset and field value
1193    int offset = field->field_offset();
1194    address addr = (address)obj + offset;
1195    jvalue value;
1196    copy_to_jvalue(&value, addr, value_type);
1197
1198    // field index
1199    reference_info.field.index = field->field_index();
1200
1201    // invoke the callback
1202    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
1203                     &reference_info,
1204                     wrapper->klass_tag(),
1205                     wrapper->obj_tag_p(),
1206                     value,
1207                     value_type,
1208                     user_data);
1209    if (res & JVMTI_VISIT_ABORT) {
1210      return res;
1211    }
1212  }
1213  return 0;
1214}
1215
1216
1217// VM operation to iterate over all objects in the heap (both reachable
1218// and unreachable)
1219class VM_HeapIterateOperation: public VM_Operation {
1220 private:
1221  ObjectClosure* _blk;
1222 public:
1223  VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }
1224
1225  VMOp_Type type() const { return VMOp_HeapIterateOperation; }
1226  void doit() {
1227    // allows class files maps to be cached during iteration
1228    ClassFieldMapCacheMark cm;
1229
1230    // make sure that heap is parsable (fills TLABs with filler objects)
1231    Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
1232
1233    // Verify heap before iteration - if the heap gets corrupted then
1234    // JVMTI's IterateOverHeap will crash.
1235    if (VerifyBeforeIteration) {
1236      Universe::verify();
1237    }
1238
1239    // do the iteration
1240    // If this operation encounters a bad object when using CMS,
1241    // consider using safe_object_iterate() which avoids perm gen
1242    // objects that may contain bad references.
1243    Universe::heap()->object_iterate(_blk);
1244  }
1245
1246};
1247
1248
1249// An ObjectClosure used to support the deprecated IterateOverHeap and
1250// IterateOverInstancesOfClass functions
1251class IterateOverHeapObjectClosure: public ObjectClosure {
1252 private:
1253  JvmtiTagMap* _tag_map;
1254  KlassHandle _klass;
1255  jvmtiHeapObjectFilter _object_filter;
1256  jvmtiHeapObjectCallback _heap_object_callback;
1257  const void* _user_data;
1258
1259  // accessors
1260  JvmtiTagMap* tag_map() const                    { return _tag_map; }
1261  jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
1262  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
1263  KlassHandle klass() const                       { return _klass; }
1264  const void* user_data() const                   { return _user_data; }
1265
1266  // indicates if iteration has been aborted
1267  bool _iteration_aborted;
1268  bool is_iteration_aborted() const               { return _iteration_aborted; }
1269  void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }
1270
1271 public:
1272  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
1273                               KlassHandle klass,
1274                               jvmtiHeapObjectFilter object_filter,
1275                               jvmtiHeapObjectCallback heap_object_callback,
1276                               const void* user_data) :
1277    _tag_map(tag_map),
1278    _klass(klass),
1279    _object_filter(object_filter),
1280    _heap_object_callback(heap_object_callback),
1281    _user_data(user_data),
1282    _iteration_aborted(false)
1283  {
1284  }
1285
1286  void do_object(oop o);
1287};
1288
1289// invoked for each object in the heap
1290void IterateOverHeapObjectClosure::do_object(oop o) {
1291  // check if iteration has been halted
1292  if (is_iteration_aborted()) return;
1293
1294  // ignore any objects that aren't visible to profiler
1295  if (!ServiceUtil::visible_oop(o)) return;
1296
1297  // instanceof check when filtering by klass
1298  if (!klass().is_null() && !o->is_a(klass()())) {
1299    return;
1300  }
1301  // prepare for the callback
1302  CallbackWrapper wrapper(tag_map(), o);
1303
1304  // if the object is tagged and we're only interested in untagged objects
1305  // then don't invoke the callback. Similarly, if the object is untagged
1306  // and we're only interested in tagged objects we skip the callback.
1307  if (wrapper.obj_tag() != 0) {
1308    if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
1309  } else {
1310    if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
1311  }
1312
1313  // invoke the agent's callback
1314  jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
1315                                                       wrapper.obj_size(),
1316                                                       wrapper.obj_tag_p(),
1317                                                       (void*)user_data());
1318  if (control == JVMTI_ITERATION_ABORT) {
1319    set_iteration_aborted(true);
1320  }
1321}
1322
1323// An ObjectClosure used to support the IterateThroughHeap function
1324class IterateThroughHeapObjectClosure: public ObjectClosure {
1325 private:
1326  JvmtiTagMap* _tag_map;
1327  KlassHandle _klass;
1328  int _heap_filter;
1329  const jvmtiHeapCallbacks* _callbacks;
1330  const void* _user_data;
1331
1332  // accessor functions
1333  JvmtiTagMap* tag_map() const                     { return _tag_map; }
1334  int heap_filter() const                          { return _heap_filter; }
1335  const jvmtiHeapCallbacks* callbacks() const      { return _callbacks; }
1336  KlassHandle klass() const                        { return _klass; }
1337  const void* user_data() const                    { return _user_data; }
1338
1339  // indicates if the iteration has been aborted
1340  bool _iteration_aborted;
1341  bool is_iteration_aborted() const                { return _iteration_aborted; }
1342
1343  // used to check the visit control flags. If the abort flag is set
1344  // then we set the iteration aborted flag so that the iteration completes
1345  // without processing any further objects
1346  bool check_flags_for_abort(jint flags) {
1347    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
1348    if (is_abort) {
1349      _iteration_aborted = true;
1350    }
1351    return is_abort;
1352  }
1353
1354 public:
1355  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
1356                                  KlassHandle klass,
1357                                  int heap_filter,
1358                                  const jvmtiHeapCallbacks* heap_callbacks,
1359                                  const void* user_data) :
1360    _tag_map(tag_map),
1361    _klass(klass),
1362    _heap_filter(heap_filter),
1363    _callbacks(heap_callbacks),
1364    _user_data(user_data),
1365    _iteration_aborted(false)
1366  {
1367  }
1368
1369  void do_object(oop o);
1370};
1371
1372// invoked for each object in the heap
1373void IterateThroughHeapObjectClosure::do_object(oop obj) {
1374  // check if iteration has been halted
1375  if (is_iteration_aborted()) return;
1376
1377  // ignore any objects that aren't visible to profiler
1378  if (!ServiceUtil::visible_oop(obj)) return;
1379
1380  // apply class filter
1381  if (is_filtered_by_klass_filter(obj, klass())) return;
1382
1383  // prepare for callback
1384  CallbackWrapper wrapper(tag_map(), obj);
1385
1386  // check if filtered by the heap filter
1387  if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
1388    return;
1389  }
1390
1391  // for arrays we need the length, otherwise -1
1392  bool is_array = obj->is_array();
1393  int len = is_array ? arrayOop(obj)->length() : -1;
1394
1395  // invoke the object callback (if callback is provided)
1396  if (callbacks()->heap_iteration_callback != NULL) {
1397    jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
1398    jint res = (*cb)(wrapper.klass_tag(),
1399                     wrapper.obj_size(),
1400                     wrapper.obj_tag_p(),
1401                     (jint)len,
1402                     (void*)user_data());
1403    if (check_flags_for_abort(res)) return;
1404  }
1405
1406  // for objects and classes we report primitive fields if callback provided
1407  if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
1408    jint res;
1409    jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
1410    if (obj->klass() == SystemDictionary::Class_klass()) {
1411      res = invoke_primitive_field_callback_for_static_fields(&wrapper,
1412                                                                    obj,
1413                                                                    cb,
1414                                                                    (void*)user_data());
1415    } else {
1416      res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
1417                                                                      obj,
1418                                                                      cb,
1419                                                                      (void*)user_data());
1420    }
1421    if (check_flags_for_abort(res)) return;
1422  }
1423
1424  // string callback
1425  if (!is_array &&
1426      callbacks()->string_primitive_value_callback != NULL &&
1427      obj->klass() == SystemDictionary::String_klass()) {
1428    jint res = invoke_string_value_callback(
1429                callbacks()->string_primitive_value_callback,
1430                &wrapper,
1431                obj,
1432                (void*)user_data() );
1433    if (check_flags_for_abort(res)) return;
1434  }
1435
1436  // array callback
1437  if (is_array &&
1438      callbacks()->array_primitive_value_callback != NULL &&
1439      obj->is_typeArray()) {
1440    jint res = invoke_array_primitive_value_callback(
1441               callbacks()->array_primitive_value_callback,
1442               &wrapper,
1443               obj,
1444               (void*)user_data() );
1445    if (check_flags_for_abort(res)) return;
1446  }
1447}
1448
1449
1450// Deprecated function to iterate over all objects in the heap
1451void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
1452                                    KlassHandle klass,
1453                                    jvmtiHeapObjectCallback heap_object_callback,
1454                                    const void* user_data)
1455{
1456  MutexLocker ml(Heap_lock);
1457  IterateOverHeapObjectClosure blk(this,
1458                                   klass,
1459                                   object_filter,
1460                                   heap_object_callback,
1461                                   user_data);
1462  VM_HeapIterateOperation op(&blk);
1463  VMThread::execute(&op);
1464}
1465
1466
1467// Iterates over all objects in the heap
1468void JvmtiTagMap::iterate_through_heap(jint heap_filter,
1469                                       KlassHandle klass,
1470                                       const jvmtiHeapCallbacks* callbacks,
1471                                       const void* user_data)
1472{
1473  MutexLocker ml(Heap_lock);
1474  IterateThroughHeapObjectClosure blk(this,
1475                                      klass,
1476                                      heap_filter,
1477                                      callbacks,
1478                                      user_data);
1479  VM_HeapIterateOperation op(&blk);
1480  VMThread::execute(&op);
1481}
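
// Agent-side sketch (illustrative; 'jvmti' is an assumed jvmtiEnv* and
// my_heap_callback an assumed agent function): the JVMTI IterateThroughHeap
// entry point ends up here, e.g.
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = &my_heap_callback;
//   jvmti->IterateThroughHeap(0, NULL, &callbacks, NULL);
//
// The iteration is performed by a VM operation at a safepoint, so the
// callbacks must not use JNI or otherwise block.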
1482
1483// support class for get_objects_with_tags
1484
1485class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
1486 private:
1487  JvmtiEnv* _env;
1488  jlong* _tags;
1489  jint _tag_count;
1490
1491  GrowableArray<jobject>* _object_results;  // collected objects (JNI weak refs)
1492  GrowableArray<uint64_t>* _tag_results;    // collected tags
1493
1494 public:
1495  TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
1496    _env = env;
1497    _tags = (jlong*)tags;
1498    _tag_count = tag_count;
1499    _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true);
1500    _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true);
1501  }
1502
1503  ~TagObjectCollector() {
1504    delete _object_results;
1505    delete _tag_results;
1506  }
1507
1508  // for each tagged object check if the tag value matches
1509  // - if it matches then we create a JNI local reference to the object
1510  // and record the reference and tag value.
1511  //
1512  void do_entry(JvmtiTagHashmapEntry* entry) {
1513    for (int i=0; i<_tag_count; i++) {
1514      if (_tags[i] == entry->tag()) {
1515        oop o = entry->object();
1516        assert(o != NULL && Universe::heap()->is_in_reserved(o), "sanity check");
1517        jobject ref = JNIHandles::make_local(JavaThread::current(), o);
1518        _object_results->append(ref);
1519        _tag_results->append((uint64_t)entry->tag());
1520      }
1521    }
1522  }
1523
1524  // return the results from the collection
1525  //
1526  jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1527    jvmtiError error;
1528    int count = _object_results->length();
1529    assert(count >= 0, "sanity check");
1530
1531    // if object_result_ptr is not NULL then allocate the result and copy
1532    // in the object references.
1533    if (object_result_ptr != NULL) {
1534      error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
1535      if (error != JVMTI_ERROR_NONE) {
1536        return error;
1537      }
1538      for (int i=0; i<count; i++) {
1539        (*object_result_ptr)[i] = _object_results->at(i);
1540      }
1541    }
1542
1543    // if tag_result_ptr is not NULL then allocate the result and copy
1544    // in the tag values.
1545    if (tag_result_ptr != NULL) {
1546      error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
1547      if (error != JVMTI_ERROR_NONE) {
1548        if (object_result_ptr != NULL) {
1549          _env->Deallocate((unsigned char*)*object_result_ptr);
1550        }
1551        return error;
1552      }
1553      for (int i=0; i<count; i++) {
1554        (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
1555      }
1556    }
1557
1558    *count_ptr = count;
1559    return JVMTI_ERROR_NONE;
1560  }
1561};
1562
1563// return the list of objects with the specified tags
1564jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
1565  jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1566
1567  TagObjectCollector collector(env(), tags, count);
1568  {
1569    // iterate over all tagged objects
1570    MutexLocker ml(lock());
1571    entry_iterate(&collector);
1572  }
1573  return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
1574}
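
// Agent-side sketch (illustrative; 'jvmti' is an assumed jvmtiEnv*):
//
//   jlong tags[1] = { 42 };
//   jint count;
//   jobject* objects;
//   jvmti->GetObjectsWithTags(1, tags, &count, &objects, NULL);
//   // ... use objects[0..count) ...
//   jvmti->Deallocate((unsigned char*)objects);
//
// The result arrays are allocated with Allocate() above and must be released
// by the agent with Deallocate().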
1575
1576
1577// ObjectMarker is used to support marking objects when walking the
1578// heap.
1579//
1580// This implementation uses the existing mark bits in an object for
1581// marking. Objects that are marked must later have their headers restored.
1582// As most objects are unlocked and don't have their identity hash computed
1583// we don't have to save their headers. Instead we save the headers that
1584// are "interesting". Later when the headers are restored this implementation
1585// restores all headers to their initial value and then restores the few
1586// objects that had interesting headers.
1587//
1588// Future work: This implementation currently uses growable arrays to save
1589// the oop and header of interesting objects. As an optimization we could
1590// use the same technique as the GC and make use of the unused area
1591// between top() and end().
1592//
1593
1594// An ObjectClosure used to restore the mark bits of an object
1595class RestoreMarksClosure : public ObjectClosure {
1596 public:
1597  void do_object(oop o) {
1598    if (o != NULL) {
1599      markOop mark = o->mark();
1600      if (mark->is_marked()) {
1601        o->init_mark();
1602      }
1603    }
1604  }
1605};
1606
1607// ObjectMarker provides the mark and visited functions
1608class ObjectMarker : AllStatic {
1609 private:
1610  // saved headers
1611  static GrowableArray<oop>* _saved_oop_stack;
1612  static GrowableArray<markOop>* _saved_mark_stack;
1613  static bool _needs_reset;                  // do we need to reset mark bits?
1614
1615 public:
1616  static void init();                       // initialize
1617  static void done();                       // clean-up
1618
1619  static inline void mark(oop o);           // mark an object
1620  static inline bool visited(oop o);        // check if object has been visited
1621
1622  static inline bool needs_reset()            { return _needs_reset; }
1623  static inline void set_needs_reset(bool v)  { _needs_reset = v; }
1624};
1625
1626GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
1627GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
1628bool ObjectMarker::_needs_reset = true;  // need to reset mark bits by default
1629
1630// initialize ObjectMarker - prepares for object marking
1631void ObjectMarker::init() {
1632  assert(Thread::current()->is_VM_thread(), "must be VMThread");
1633
1634  // prepare heap for iteration
1635  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
1636
1637  // create stacks for interesting headers
1638  _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markOop>(4000, true);
1639  _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);
1640
1641  if (UseBiasedLocking) {
1642    BiasedLocking::preserve_marks();
1643  }
1644}
1645
1646// Object marking is done so restore object headers
1647void ObjectMarker::done() {
1648  // iterate over all objects and restore the mark bits to
1649  // their initial value
1650  RestoreMarksClosure blk;
1651  if (needs_reset()) {
1652    Universe::heap()->object_iterate(&blk);
1653  } else {
1654    // We don't need to reset mark bits on this call, but reset the
1655    // flag to the default for the next call.
1656    set_needs_reset(true);
1657  }
1658
1659  // now restore the interesting headers
1660  for (int i = 0; i < _saved_oop_stack->length(); i++) {
1661    oop o = _saved_oop_stack->at(i);
1662    markOop mark = _saved_mark_stack->at(i);
1663    o->set_mark(mark);
1664  }
1665
1666  if (UseBiasedLocking) {
1667    BiasedLocking::restore_marks();
1668  }
1669
1670  // free the stacks
1671  delete _saved_oop_stack;
1672  delete _saved_mark_stack;
1673}
1674
1675// mark an object
1676inline void ObjectMarker::mark(oop o) {
1677  assert(Universe::heap()->is_in(o), "sanity check");
1678  assert(!o->mark()->is_marked(), "should only mark an object once");
1679
1680  // object's mark word
1681  markOop mark = o->mark();
1682
1683  if (mark->must_be_preserved(o)) {
1684    _saved_mark_stack->push(mark);
1685    _saved_oop_stack->push(o);
1686  }
1687
1688  // mark the object
1689  o->set_mark(markOopDesc::prototype()->set_marked());
1690}
1691
1692// return true if object is marked
1693inline bool ObjectMarker::visited(oop o) {
1694  return o->mark()->is_marked();
1695}
1696
1697// Stack allocated class to help ensure that ObjectMarker is used
1698// correctly. Constructor initializes ObjectMarker, destructor calls
1699// ObjectMarker's done() function to restore object headers.
1700class ObjectMarkerController : public StackObj {
1701 public:
1702  ObjectMarkerController() {
1703    ObjectMarker::init();
1704  }
1705  ~ObjectMarkerController() {
1706    ObjectMarker::done();
1707  }
1708};
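
// Typical use (a sketch inferred from the heap walk code that follows): the
// heap walk VM operation places an ObjectMarkerController on its stack for
// the duration of the walk and then uses ObjectMarker::visited()/mark() while
// draining the visit stack, roughly:
//
//   {
//     ObjectMarkerController marker;      // prepares heap, saves headers
//     ...
//     while (!visit_stack->is_empty()) {
//       oop o = visit_stack->pop();
//       if (!ObjectMarker::visited(o)) {
//         ObjectMarker::mark(o);          // mark before reporting/visiting
//         ... report o and push its unvisited referents ...
//       }
//     }
//   }                                     // destructor restores all headers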
1709
1710
1711// helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
1712// (not performance critical as only used for roots)
1713static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
1714  switch (kind) {
1715    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
1716    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
1717    case JVMTI_HEAP_REFERENCE_MONITOR:      return JVMTI_HEAP_ROOT_MONITOR;
1718    case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
1719    case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
1720    case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
1721    case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
1722    default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
1723  }
1724}
1725
1726// Base class for all heap walk contexts. The base class maintains a flag
1727// to indicate if the context is valid or not.
1728class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
1729 private:
1730  bool _valid;
1731 public:
1732  HeapWalkContext(bool valid)                   { _valid = valid; }
1733  void invalidate()                             { _valid = false; }
1734  bool is_valid() const                         { return _valid; }
1735};
1736
1737// A basic heap walk context for the deprecated heap walking functions.
1738// The context for a basic heap walk consists of the callbacks and the
1739// fields used by the referrer caching scheme.
1740class BasicHeapWalkContext: public HeapWalkContext {
1741 private:
1742  jvmtiHeapRootCallback _heap_root_callback;
1743  jvmtiStackReferenceCallback _stack_ref_callback;
1744  jvmtiObjectReferenceCallback _object_ref_callback;
1745
1746  // used for caching
1747  oop _last_referrer;
1748  jlong _last_referrer_tag;
1749
1750 public:
1751  BasicHeapWalkContext() : HeapWalkContext(false) { }
1752
1753  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
1754                       jvmtiStackReferenceCallback stack_ref_callback,
1755                       jvmtiObjectReferenceCallback object_ref_callback) :
1756    HeapWalkContext(true),
1757    _heap_root_callback(heap_root_callback),
1758    _stack_ref_callback(stack_ref_callback),
1759    _object_ref_callback(object_ref_callback),
1760    _last_referrer(NULL),
1761    _last_referrer_tag(0) {
1762  }
1763
1764  // accessors
1765  jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
1766  jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
1767  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback;  }
1768
1769  oop last_referrer() const               { return _last_referrer; }
1770  void set_last_referrer(oop referrer)    { _last_referrer = referrer; }
1771  jlong last_referrer_tag() const         { return _last_referrer_tag; }
1772  void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
1773};
1774
1775// The advanced heap walk context for the FollowReferences functions.
1776// The context is the callbacks, and the fields used for filtering.
1777class AdvancedHeapWalkContext: public HeapWalkContext {
1778 private:
1779  jint _heap_filter;
1780  KlassHandle _klass_filter;
1781  const jvmtiHeapCallbacks* _heap_callbacks;
1782
1783 public:
1784  AdvancedHeapWalkContext() : HeapWalkContext(false) { }
1785
1786  AdvancedHeapWalkContext(jint heap_filter,
1787                           KlassHandle klass_filter,
1788                           const jvmtiHeapCallbacks* heap_callbacks) :
1789    HeapWalkContext(true),
1790    _heap_filter(heap_filter),
1791    _klass_filter(klass_filter),
1792    _heap_callbacks(heap_callbacks) {
1793  }
1794
1795  // accessors
1796  jint heap_filter() const         { return _heap_filter; }
1797  KlassHandle klass_filter() const { return _klass_filter; }
1798
1799  const jvmtiHeapReferenceCallback heap_reference_callback() const {
1800    return _heap_callbacks->heap_reference_callback;
1801  };
1802  const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
1803    return _heap_callbacks->primitive_field_callback;
1804  }
1805  const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
1806    return _heap_callbacks->array_primitive_value_callback;
1807  }
1808  const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
1809    return _heap_callbacks->string_primitive_value_callback;
1810  }
1811};
1812
1813// The CallbackInvoker is a class with static functions that the heap walk can call
1814// into to invoke callbacks. It works in one of two modes. The "basic" mode is
1815// used for the deprecated IterateOverReachableObjects functions. The "advanced"
1816// mode is for the newer FollowReferences function which supports a lot of
1817// additional callbacks.
1818class CallbackInvoker : AllStatic {
1819 private:
1820  // heap walk styles
1821  enum { basic, advanced };
1822  static int _heap_walk_type;
1823  static bool is_basic_heap_walk()           { return _heap_walk_type == basic; }
1824  static bool is_advanced_heap_walk()        { return _heap_walk_type == advanced; }
1825
1826  // context for basic style heap walk
1827  static BasicHeapWalkContext _basic_context;
1828  static BasicHeapWalkContext* basic_context() {
1829    assert(_basic_context.is_valid(), "invalid");
1830    return &_basic_context;
1831  }
1832
1833  // context for advanced style heap walk
1834  static AdvancedHeapWalkContext _advanced_context;
1835  static AdvancedHeapWalkContext* advanced_context() {
1836    assert(_advanced_context.is_valid(), "invalid");
1837    return &_advanced_context;
1838  }
1839
1840  // context needed for all heap walks
1841  static JvmtiTagMap* _tag_map;
1842  static const void* _user_data;
1843  static GrowableArray<oop>* _visit_stack;
1844
1845  // accessors
1846  static JvmtiTagMap* tag_map()                        { return _tag_map; }
1847  static const void* user_data()                       { return _user_data; }
1848  static GrowableArray<oop>* visit_stack()             { return _visit_stack; }
1849
1850  // if the object hasn't been visited then push it onto the visit stack
1851  // so that it will be visited later
1852  static inline bool check_for_visit(oop obj) {
1853    if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
1854    return true;
1855  }
1856
1857  // invoke basic style callbacks
1858  static inline bool invoke_basic_heap_root_callback
1859    (jvmtiHeapRootKind root_kind, oop obj);
1860  static inline bool invoke_basic_stack_ref_callback
1861    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
1862     int slot, oop obj);
1863  static inline bool invoke_basic_object_reference_callback
1864    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);
1865
1866  // invoke advanced style callbacks
1867  static inline bool invoke_advanced_heap_root_callback
1868    (jvmtiHeapReferenceKind ref_kind, oop obj);
1869  static inline bool invoke_advanced_stack_ref_callback
1870    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
1871     jmethodID method, jlocation bci, jint slot, oop obj);
1872  static inline bool invoke_advanced_object_reference_callback
1873    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);
1874
1875  // used to report the value of primitive fields
1876  static inline bool report_primitive_field
1877    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);
1878
1879 public:
1880  // initialize for basic mode
1881  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1882                                             GrowableArray<oop>* visit_stack,
1883                                             const void* user_data,
1884                                             BasicHeapWalkContext context);
1885
1886  // initialize for advanced mode
1887  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1888                                                GrowableArray<oop>* visit_stack,
1889                                                const void* user_data,
1890                                                AdvancedHeapWalkContext context);
1891
1892   // functions to report roots
1893  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
1894  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
1895    jmethodID m, oop o);
1896  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
1897    jmethodID method, jlocation bci, jint slot, oop o);
1898
1899  // functions to report references
1900  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
1901  static inline bool report_class_reference(oop referrer, oop referree);
1902  static inline bool report_class_loader_reference(oop referrer, oop referree);
1903  static inline bool report_signers_reference(oop referrer, oop referree);
1904  static inline bool report_protection_domain_reference(oop referrer, oop referree);
1905  static inline bool report_superclass_reference(oop referrer, oop referree);
1906  static inline bool report_interface_reference(oop referrer, oop referree);
1907  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
1908  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
1909  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
1910  static inline bool report_primitive_array_values(oop array);
1911  static inline bool report_string_value(oop str);
1912  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
1913  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
1914};
1915
1916// statics
1917int CallbackInvoker::_heap_walk_type;
1918BasicHeapWalkContext CallbackInvoker::_basic_context;
1919AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
1920JvmtiTagMap* CallbackInvoker::_tag_map;
1921const void* CallbackInvoker::_user_data;
1922GrowableArray<oop>* CallbackInvoker::_visit_stack;
1923
1924// initialize for basic heap walk (IterateOverReachableObjects et al)
1925void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1926                                                     GrowableArray<oop>* visit_stack,
1927                                                     const void* user_data,
1928                                                     BasicHeapWalkContext context) {
1929  _tag_map = tag_map;
1930  _visit_stack = visit_stack;
1931  _user_data = user_data;
1932  _basic_context = context;
1933  _advanced_context.invalidate();       // will trigger assertion if used
1934  _heap_walk_type = basic;
1935}
1936
1937// initialize for advanced heap walk (FollowReferences)
1938void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1939                                                        GrowableArray<oop>* visit_stack,
1940                                                        const void* user_data,
1941                                                        AdvancedHeapWalkContext context) {
1942  _tag_map = tag_map;
1943  _visit_stack = visit_stack;
1944  _user_data = user_data;
1945  _advanced_context = context;
1946  _basic_context.invalidate();      // will trigger assertion if used
1947  _heap_walk_type = advanced;
1948}
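
// Note on dispatch: a heap walk calls exactly one of the two initializers
// above before traversal starts. The root and reference reporters below
// (report_simple_root, report_jni_local_root, report_stack_ref_root and the
// report_*_reference functions) then check is_basic_heap_walk() /
// is_advanced_heap_walk() and forward to the matching invoke_basic_* or
// invoke_advanced_* function. The primitive value reporters
// (report_primitive_*, report_string_value) are only reached in advanced
// mode, as the deprecated interfaces have no equivalent callbacks.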
1949
1950
1951// invoke basic style heap root callback
1952inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
1953  assert(ServiceUtil::visible_oop(obj), "checking");
1954
1955  // check if heap roots should be reported
1956  jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
1957  if (cb == NULL) {
1958    return check_for_visit(obj);
1959  }
1960
1961  CallbackWrapper wrapper(tag_map(), obj);
1962  jvmtiIterationControl control = (*cb)(root_kind,
1963                                        wrapper.klass_tag(),
1964                                        wrapper.obj_size(),
1965                                        wrapper.obj_tag_p(),
1966                                        (void*)user_data());
1967  // push root to visit stack when following references
1968  if (control == JVMTI_ITERATION_CONTINUE &&
1969      basic_context()->object_ref_callback() != NULL) {
1970    visit_stack()->push(obj);
1971  }
1972  return control != JVMTI_ITERATION_ABORT;
1973}
1974
1975// invoke basic style stack ref callback
1976inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
1977                                                             jlong thread_tag,
1978                                                             jint depth,
1979                                                             jmethodID method,
1980                                                             jint slot,
1981                                                             oop obj) {
1982  assert(ServiceUtil::visible_oop(obj), "checking");
1983
1984  // check if stack refs should be reported
1985  jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
1986  if (cb == NULL) {
1987    return check_for_visit(obj);
1988  }
1989
1990  CallbackWrapper wrapper(tag_map(), obj);
1991  jvmtiIterationControl control = (*cb)(root_kind,
1992                                        wrapper.klass_tag(),
1993                                        wrapper.obj_size(),
1994                                        wrapper.obj_tag_p(),
1995                                        thread_tag,
1996                                        depth,
1997                                        method,
1998                                        slot,
1999                                        (void*)user_data());
2000  // push root to visit stack when following references
2001  if (control == JVMTI_ITERATION_CONTINUE &&
2002      basic_context()->object_ref_callback() != NULL) {
2003    visit_stack()->push(obj);
2004  }
2005  return control != JVMTI_ITERATION_ABORT;
2006}
2007
2008// invoke basic style object reference callback
2009inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
2010                                                                    oop referrer,
2011                                                                    oop referree,
2012                                                                    jint index) {
2013
2014  assert(ServiceUtil::visible_oop(referrer), "checking");
2015  assert(ServiceUtil::visible_oop(referree), "checking");
2016
2017  BasicHeapWalkContext* context = basic_context();
2018
2019  // callback requires the referrer's tag. If it's the same referrer
2020  // as the last call then we use the cached value.
2021  jlong referrer_tag;
2022  if (referrer == context->last_referrer()) {
2023    referrer_tag = context->last_referrer_tag();
2024  } else {
2025    referrer_tag = tag_for(tag_map(), referrer);
2026  }
2027
2028  // do the callback
2029  CallbackWrapper wrapper(tag_map(), referree);
2030  jvmtiObjectReferenceCallback cb = context->object_ref_callback();
2031  jvmtiIterationControl control = (*cb)(ref_kind,
2032                                        wrapper.klass_tag(),
2033                                        wrapper.obj_size(),
2034                                        wrapper.obj_tag_p(),
2035                                        referrer_tag,
2036                                        index,
2037                                        (void*)user_data());
2038
2039  // record referrer and referrer tag. For self-references record the
2040  // tag value from the callback as this might differ from referrer_tag.
2041  context->set_last_referrer(referrer);
2042  if (referrer == referree) {
2043    context->set_last_referrer_tag(*wrapper.obj_tag_p());
2044  } else {
2045    context->set_last_referrer_tag(referrer_tag);
2046  }
2047
2048  if (control == JVMTI_ITERATION_CONTINUE) {
2049    return check_for_visit(referree);
2050  } else {
2051    return control != JVMTI_ITERATION_ABORT;
2052  }
2053}
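
// Note: for the deprecated (basic) callbacks the agent's jvmtiIterationControl
// return value is interpreted by the three invoke_basic_* functions above as:
//   JVMTI_ITERATION_CONTINUE - continue the iteration; the object is also
//                              pushed for visiting when references are being
//                              followed
//   JVMTI_ITERATION_IGNORE   - continue the iteration but do not follow the
//                              object's own references
//   JVMTI_ITERATION_ABORT    - terminate the iteration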
2054
2055// invoke advanced style heap root callback
2056inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
2057                                                                oop obj) {
2058  assert(ServiceUtil::visible_oop(obj), "checking");
2059
2060  AdvancedHeapWalkContext* context = advanced_context();
2061
2062  // check that callback is provided
2063  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2064  if (cb == NULL) {
2065    return check_for_visit(obj);
2066  }
2067
2068  // apply class filter
2069  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2070    return check_for_visit(obj);
2071  }
2072
2073  // setup the callback wrapper
2074  CallbackWrapper wrapper(tag_map(), obj);
2075
2076  // apply tag filter
2077  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2078                                 wrapper.klass_tag(),
2079                                 context->heap_filter())) {
2080    return check_for_visit(obj);
2081  }
2082
2083  // for arrays we need the length, otherwise -1
2084  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2085
2086  // invoke the callback
2087  jint res  = (*cb)(ref_kind,
2088                    NULL, // referrer info
2089                    wrapper.klass_tag(),
2090                    0,    // referrer_class_tag is 0 for heap root
2091                    wrapper.obj_size(),
2092                    wrapper.obj_tag_p(),
2093                    NULL, // referrer_tag_p
2094                    len,
2095                    (void*)user_data());
2096  if (res & JVMTI_VISIT_ABORT) {
2097    return false;
2098  }
2099  if (res & JVMTI_VISIT_OBJECTS) {
2100    check_for_visit(obj);
2101  }
2102  return true;
2103}
2104
2105// report a reference from a thread stack to an object
2106inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
2107                                                                jlong thread_tag,
2108                                                                jlong tid,
2109                                                                int depth,
2110                                                                jmethodID method,
2111                                                                jlocation bci,
2112                                                                jint slot,
2113                                                                oop obj) {
2114  assert(ServiceUtil::visible_oop(obj), "checking");
2115
2116  AdvancedHeapWalkContext* context = advanced_context();
2117
2118  // check that callback is provided
2119  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2120  if (cb == NULL) {
2121    return check_for_visit(obj);
2122  }
2123
2124  // apply class filter
2125  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2126    return check_for_visit(obj);
2127  }
2128
2129  // setup the callback wrapper
2130  CallbackWrapper wrapper(tag_map(), obj);
2131
2132  // apply tag filter
2133  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2134                                 wrapper.klass_tag(),
2135                                 context->heap_filter())) {
2136    return check_for_visit(obj);
2137  }
2138
2139  // setup the referrer info
2140  jvmtiHeapReferenceInfo reference_info;
2141  reference_info.stack_local.thread_tag = thread_tag;
2142  reference_info.stack_local.thread_id = tid;
2143  reference_info.stack_local.depth = depth;
2144  reference_info.stack_local.method = method;
2145  reference_info.stack_local.location = bci;
2146  reference_info.stack_local.slot = slot;
2147
2148  // for arrays we need the length, otherwise -1
2149  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2150
2151  // call into the agent
2152  int res = (*cb)(ref_kind,
2153                  &reference_info,
2154                  wrapper.klass_tag(),
2155                  0,    // referrer_class_tag is 0 for heap root (stack)
2156                  wrapper.obj_size(),
2157                  wrapper.obj_tag_p(),
2158                  NULL, // referrer_tag_p is NULL for heap root (stack)
2159                  len,
2160                  (void*)user_data());
2161
2162  if (res & JVMTI_VISIT_ABORT) {
2163    return false;
2164  }
2165  if (res & JVMTI_VISIT_OBJECTS) {
2166    check_for_visit(obj);
2167  }
2168  return true;
2169}
2170
2171// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
2172// only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
2173#define REF_INFO_MASK  ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
2174                      | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
2175                      | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
2176                      | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
2177                      | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
2178                      | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
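
// For example, in invoke_advanced_object_reference_callback below the mask is
// tested against the kind being reported:
//   (REF_INFO_MASK & (1 << JVMTI_HEAP_REFERENCE_FIELD)) != 0  -> &reference_info is passed
//   (REF_INFO_MASK & (1 << JVMTI_HEAP_REFERENCE_CLASS)) == 0  -> NULL is passed
// matching the JVM TI rule that reference_info is only defined for the six
// reference kinds included in the mask.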
2179
2180// invoke the object reference callback to report a reference
2181inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
2182                                                                       oop referrer,
2183                                                                       oop obj,
2184                                                                       jint index)
2185{
2186  // field index is only valid field in reference_info
2187  static jvmtiHeapReferenceInfo reference_info = { 0 };
2188
2189  assert(ServiceUtil::visible_oop(referrer), "checking");
2190  assert(ServiceUtil::visible_oop(obj), "checking");
2191
2192  AdvancedHeapWalkContext* context = advanced_context();
2193
2194  // check that callback is provided
2195  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2196  if (cb == NULL) {
2197    return check_for_visit(obj);
2198  }
2199
2200  // apply class filter
2201  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2202    return check_for_visit(obj);
2203  }
2204
2205  // setup the callback wrapper
2206  TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
2207
2208  // apply tag filter
2209  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2210                                 wrapper.klass_tag(),
2211                                 context->heap_filter())) {
2212    return check_for_visit(obj);
2213  }
2214
2215  // field index is only valid field in reference_info
2216  reference_info.field.index = index;
2217
2218  // for arrays we need the length, otherwise -1
2219  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2220
2221  // invoke the callback
2222  int res = (*cb)(ref_kind,
2223                  (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
2224                  wrapper.klass_tag(),
2225                  wrapper.referrer_klass_tag(),
2226                  wrapper.obj_size(),
2227                  wrapper.obj_tag_p(),
2228                  wrapper.referrer_tag_p(),
2229                  len,
2230                  (void*)user_data());
2231
2232  if (res & JVMTI_VISIT_ABORT) {
2233    return false;
2234  }
2235  if (res & JVMTI_VISIT_OBJECTS) {
2236    check_for_visit(obj);
2237  }
2238  return true;
2239}
2240
2241// report a "simple root"
2242inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
2243  assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
2244         kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
2245  assert(ServiceUtil::visible_oop(obj), "checking");
2246
2247  if (is_basic_heap_walk()) {
2248    // map to old style root kind
2249    jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
2250    return invoke_basic_heap_root_callback(root_kind, obj);
2251  } else {
2252    assert(is_advanced_heap_walk(), "wrong heap walk type");
2253    return invoke_advanced_heap_root_callback(kind, obj);
2254  }
2255}
2256
2257
2258// report the values of a primitive array (via the array primitive value callback)
2259inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
2260  assert(obj->is_typeArray(), "not a primitive array");
2261
2262  AdvancedHeapWalkContext* context = advanced_context();
2263  assert(context->array_primitive_value_callback() != NULL, "no callback");
2264
2265  // apply class filter
2266  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2267    return true;
2268  }
2269
2270  CallbackWrapper wrapper(tag_map(), obj);
2271
2272  // apply tag filter
2273  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2274                                 wrapper.klass_tag(),
2275                                 context->heap_filter())) {
2276    return true;
2277  }
2278
2279  // invoke the callback
2280  int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
2281                                                  &wrapper,
2282                                                  obj,
2283                                                  (void*)user_data());
2284  return (!(res & JVMTI_VISIT_ABORT));
2285}
2286
2287// invoke the string value callback
2288inline bool CallbackInvoker::report_string_value(oop str) {
2289  assert(str->klass() == SystemDictionary::String_klass(), "not a string");
2290
2291  AdvancedHeapWalkContext* context = advanced_context();
2292  assert(context->string_primitive_value_callback() != NULL, "no callback");
2293
2294  // apply class filter
2295  if (is_filtered_by_klass_filter(str, context->klass_filter())) {
2296    return true;
2297  }
2298
2299  CallbackWrapper wrapper(tag_map(), str);
2300
2301  // apply tag filter
2302  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2303                                 wrapper.klass_tag(),
2304                                 context->heap_filter())) {
2305    return true;
2306  }
2307
2308  // invoke the callback
2309  int res = invoke_string_value_callback(context->string_primitive_value_callback(),
2310                                         &wrapper,
2311                                         str,
2312                                         (void*)user_data());
2313  return (!(res & JVMTI_VISIT_ABORT));
2314}
2315
2316// invoke the primitive field callback
2317inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
2318                                                    oop obj,
2319                                                    jint index,
2320                                                    address addr,
2321                                                    char type)
2322{
2323  // for primitive fields only the index will be set
2324  static jvmtiHeapReferenceInfo reference_info = { 0 };
2325
2326  AdvancedHeapWalkContext* context = advanced_context();
2327  assert(context->primitive_field_callback() != NULL, "no callback");
2328
2329  // apply class filter
2330  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2331    return true;
2332  }
2333
2334  CallbackWrapper wrapper(tag_map(), obj);
2335
2336  // apply tag filter
2337  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2338                                 wrapper.klass_tag(),
2339                                 context->heap_filter())) {
2340    return true;
2341  }
2342
2343  // the field index in the referrer
2344  reference_info.field.index = index;
2345
2346  // map the type
2347  jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
2348
2349  // setup the jvalue
2350  jvalue value;
2351  copy_to_jvalue(&value, addr, value_type);
2352
2353  jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
2354  int res = (*cb)(ref_kind,
2355                  &reference_info,
2356                  wrapper.klass_tag(),
2357                  wrapper.obj_tag_p(),
2358                  value,
2359                  value_type,
2360                  (void*)user_data());
2361  return (!(res & JVMTI_VISIT_ABORT));
2362}
2363
2364
2365// instance field
2366inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
2367                                                             jint index,
2368                                                             address value,
2369                                                             char type) {
2370  return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2371                                obj,
2372                                index,
2373                                value,
2374                                type);
2375}
2376
2377// static field
2378inline bool CallbackInvoker::report_primitive_static_field(oop obj,
2379                                                           jint index,
2380                                                           address value,
2381                                                           char type) {
2382  return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2383                                obj,
2384                                index,
2385                                value,
2386                                type);
2387}
2388
2389// report a JNI local (root object) to the profiler
2390inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
2391  if (is_basic_heap_walk()) {
2392    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
2393                                           thread_tag,
2394                                           depth,
2395                                           m,
2396                                           -1,
2397                                           obj);
2398  } else {
2399    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
2400                                              thread_tag, tid,
2401                                              depth,
2402                                              m,
2403                                              (jlocation)-1,
2404                                              -1,
2405                                              obj);
2406  }
2407}
2408
2409
2410// report a local (stack reference, root object)
2411inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2412                                                   jlong tid,
2413                                                   jint depth,
2414                                                   jmethodID method,
2415                                                   jlocation bci,
2416                                                   jint slot,
2417                                                   oop obj) {
2418  if (is_basic_heap_walk()) {
2419    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2420                                           thread_tag,
2421                                           depth,
2422                                           method,
2423                                           slot,
2424                                           obj);
2425  } else {
2426    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2427                                              thread_tag,
2428                                              tid,
2429                                              depth,
2430                                              method,
2431                                              bci,
2432                                              slot,
2433                                              obj);
2434  }
2435}
2436
2437// report an object referencing a class.
2438inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
2439  if (is_basic_heap_walk()) {
2440    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2441  } else {
2442    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2443  }
2444}
2445
2446// report a class referencing its class loader.
2447inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
2448  if (is_basic_heap_walk()) {
2449    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2450  } else {
2451    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2452  }
2453}
2454
2455// report a class referencing its signers.
2456inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2457  if (is_basic_heap_walk()) {
2458    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2459  } else {
2460    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2461  }
2462}
2463
2464// report a class referencing its protection domain.
2465inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2466  if (is_basic_heap_walk()) {
2467    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2468  } else {
2469    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2470  }
2471}
2472
2473// report a class referencing its superclass.
2474inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2475  if (is_basic_heap_walk()) {
2476    // Send this to be consistent with past implementation
2477    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2478  } else {
2479    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2480  }
2481}
2482
2483// report a class referencing one of its interfaces.
2484inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2485  if (is_basic_heap_walk()) {
2486    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2487  } else {
2488    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2489  }
2490}
2491
2492// report a class referencing one of its static fields.
2493inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2494  if (is_basic_heap_walk()) {
2495    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2496  } else {
2497    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2498  }
2499}
2500
2501// report an array referencing an element object
2502inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2503  if (is_basic_heap_walk()) {
2504    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2505  } else {
2506    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2507  }
2508}
2509
2510// report an object referencing an instance field object
2511inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2512  if (is_basic_heap_walk()) {
2513    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2514  } else {
2515    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2516  }
2517}
2518
2519// report a class referencing one of its constant pool entries
2520inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2521  if (is_basic_heap_walk()) {
2522    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2523  } else {
2524    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2525  }
2526}
2527
2528// A supporting closure used to process simple roots
2529class SimpleRootsClosure : public OopClosure {
2530 private:
2531  jvmtiHeapReferenceKind _kind;
2532  bool _continue;
2533
2534  jvmtiHeapReferenceKind root_kind()    { return _kind; }
2535
2536 public:
2537  void set_kind(jvmtiHeapReferenceKind kind) {
2538    _kind = kind;
2539    _continue = true;
2540  }
2541
2542  inline bool stopped() {
2543    return !_continue;
2544  }
2545
2546  void do_oop(oop* obj_p) {
2547    // iteration has terminated
2548    if (stopped()) {
2549      return;
2550    }
2551
2552    // ignore null or deleted handles
2553    oop o = *obj_p;
2554    if (o == NULL || o == JNIHandles::deleted_handle()) {
2555      return;
2556    }
2557
2558    assert(Universe::heap()->is_in_reserved(o), "should be impossible");
2559
2560    jvmtiHeapReferenceKind kind = root_kind();
2561    if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
2562      // SystemDictionary::always_strong_oops_do reports the application
2563      // class loader as a root. We want this root to be reported as
2564      // a root kind of "OTHER" rather than "SYSTEM_CLASS".
2565      if (!o->is_instanceMirror()) {
2566        kind = JVMTI_HEAP_REFERENCE_OTHER;
2567      }
2568    }
2569
2570    // some objects are ignored - in the case of simple
2571    // roots it's mostly Symbol*s that we are skipping
2572    // here.
2573    if (!ServiceUtil::visible_oop(o)) {
2574      return;
2575    }
2576
2577    // invoke the callback
2578    _continue = CallbackInvoker::report_simple_root(kind, o);
2579
2580  }
2581  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2582};
2583
2584// A supporting closure used to process JNI locals
2585class JNILocalRootsClosure : public OopClosure {
2586 private:
2587  jlong _thread_tag;
2588  jlong _tid;
2589  jint _depth;
2590  jmethodID _method;
2591  bool _continue;
2592 public:
2593  void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
2594    _thread_tag = thread_tag;
2595    _tid = tid;
2596    _depth = depth;
2597    _method = method;
2598    _continue = true;
2599  }
2600
2601  inline bool stopped() {
2602    return !_continue;
2603  }
2604
2605  void do_oop(oop* obj_p) {
2606    // iteration has terminated
2607    if (stopped()) {
2608      return;
2609    }
2610
2611    // ignore null or deleted handles
2612    oop o = *obj_p;
2613    if (o == NULL || o == JNIHandles::deleted_handle()) {
2614      return;
2615    }
2616
2617    if (!ServiceUtil::visible_oop(o)) {
2618      return;
2619    }
2620
2621    // invoke the callback
2622    _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
2623  }
2624  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2625};
2626
2627
2628// A VM operation to iterate over objects that are reachable from
2629// a set of roots or an initial object.
2630//
2631// For VM_HeapWalkOperation the set of roots used is :-
2632//
2633// - All JNI global references
2634// - All inflated monitors
2635// - All classes loaded by the boot class loader (or all classes
2636//     in the event that class unloading is disabled)
2637// - All java threads
2638// - For each java thread then all locals and JNI local references
2639//      on the thread's execution stack
2640// - All visible/explainable objects from Universe::oops_do
2641//
2642class VM_HeapWalkOperation: public VM_Operation {
2643 private:
2644  enum {
2645    initial_visit_stack_size = 4000
2646  };
2647
2648  bool _is_advanced_heap_walk;                      // indicates FollowReferences
2649  JvmtiTagMap* _tag_map;
2650  Handle _initial_object;
2651  GrowableArray<oop>* _visit_stack;                 // the visit stack
2652
2653  bool _collecting_heap_roots;                      // are we collecting roots
2654  bool _following_object_refs;                      // are we following object references
2655
2656  bool _reporting_primitive_fields;                 // optional reporting
2657  bool _reporting_primitive_array_values;
2658  bool _reporting_string_values;
2659
2660  GrowableArray<oop>* create_visit_stack() {
2661    return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true);
2662  }
2663
2664  // accessors
2665  bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
2666  JvmtiTagMap* tag_map() const                     { return _tag_map; }
2667  Handle initial_object() const                    { return _initial_object; }
2668
2669  bool is_following_references() const             { return _following_object_refs; }
2670
2671  bool is_reporting_primitive_fields()  const      { return _reporting_primitive_fields; }
2672  bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
2673  bool is_reporting_string_values() const          { return _reporting_string_values; }
2674
2675  GrowableArray<oop>* visit_stack() const          { return _visit_stack; }
2676
2677  // iterate over the various object types
2678  inline bool iterate_over_array(oop o);
2679  inline bool iterate_over_type_array(oop o);
2680  inline bool iterate_over_class(oop o);
2681  inline bool iterate_over_object(oop o);
2682
2683  // root collection
2684  inline bool collect_simple_roots();
2685  inline bool collect_stack_roots();
2686  inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);
2687
2688  // visit an object
2689  inline bool visit(oop o);
2690
2691 public:
2692  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2693                       Handle initial_object,
2694                       BasicHeapWalkContext callbacks,
2695                       const void* user_data);
2696
2697  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2698                       Handle initial_object,
2699                       AdvancedHeapWalkContext callbacks,
2700                       const void* user_data);
2701
2702  ~VM_HeapWalkOperation();
2703
2704  VMOp_Type type() const { return VMOp_HeapWalkOperation; }
2705  void doit();
2706};
2707
2708
2709VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2710                                           Handle initial_object,
2711                                           BasicHeapWalkContext callbacks,
2712                                           const void* user_data) {
2713  _is_advanced_heap_walk = false;
2714  _tag_map = tag_map;
2715  _initial_object = initial_object;
2716  _following_object_refs = (callbacks.object_ref_callback() != NULL);
2717  _reporting_primitive_fields = false;
2718  _reporting_primitive_array_values = false;
2719  _reporting_string_values = false;
2720  _visit_stack = create_visit_stack();
2721
2722
2723  CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2724}
2725
2726VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2727                                           Handle initial_object,
2728                                           AdvancedHeapWalkContext callbacks,
2729                                           const void* user_data) {
2730  _is_advanced_heap_walk = true;
2731  _tag_map = tag_map;
2732  _initial_object = initial_object;
2733  _following_object_refs = true;
2734  _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);
2735  _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);
2736  _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);
2737  _visit_stack = create_visit_stack();
2738
2739  CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2740}
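
// For illustration only: the advanced constructor above backs the JVM TI
// FollowReferences function. A minimal agent-side callback set that simply
// follows every reference (my_heap_ref_cb is a placeholder name) might be:
//
//   static jint JNICALL my_heap_ref_cb(jvmtiHeapReferenceKind kind,
//                                      const jvmtiHeapReferenceInfo* info,
//                                      jlong class_tag, jlong referrer_class_tag,
//                                      jlong size, jlong* tag_ptr,
//                                      jlong* referrer_tag_ptr, jint length,
//                                      void* user_data) {
//     return JVMTI_VISIT_OBJECTS;    // JVMTI_VISIT_ABORT would stop the walk
//   }
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = my_heap_ref_cb;
//   jvmti->FollowReferences(0 /* heap_filter */, NULL /* klass */,
//                           NULL /* initial_object */, &callbacks, NULL);
//
// The JVMTI_VISIT_* bits returned by such a callback are exactly what the
// invoke_advanced_* functions above test for.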
2741
2742VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2743  if (_following_object_refs) {
2744    assert(_visit_stack != NULL, "checking");
2745    delete _visit_stack;
2746    _visit_stack = NULL;
2747  }
2748}
2749
2750// an array references its class and has a reference to
2751// each element in the array
2752inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
2753  objArrayOop array = objArrayOop(o);
2754
2755  // array reference to its class
2756  oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
2757  if (!CallbackInvoker::report_class_reference(o, mirror)) {
2758    return false;
2759  }
2760
2761  // iterate over the array and report each reference to a
2762  // non-null element
2763  for (int index=0; index<array->length(); index++) {
2764    oop elem = array->obj_at(index);
2765    if (elem == NULL) {
2766      continue;
2767    }
2768
2769    // report the array reference o[index] = elem
2770    if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2771      return false;
2772    }
2773  }
2774  return true;
2775}
2776
2777// a type array references its class
2778inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
2779  Klass* k = o->klass();
2780  oop mirror = k->java_mirror();
2781  if (!CallbackInvoker::report_class_reference(o, mirror)) {
2782    return false;
2783  }
2784
2785  // report the array contents if required
2786  if (is_reporting_primitive_array_values()) {
2787    if (!CallbackInvoker::report_primitive_array_values(o)) {
2788      return false;
2789    }
2790  }
2791  return true;
2792}
2793
2794#ifdef ASSERT
2795// verify that a static oop field is in range
2796static inline bool verify_static_oop(InstanceKlass* ik,
2797                                     oop mirror, int offset) {
2798  address obj_p = (address)mirror + offset;
2799  address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
2800  address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
2801  assert(end >= start, "sanity check");
2802
2803  if (obj_p >= start && obj_p < end) {
2804    return true;
2805  } else {
2806    return false;
2807  }
2808}
2809#endif // #ifdef ASSERT
2810
2811// a class references its super class, interfaces, class loader, ...
2812// and finally its static fields
2813inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
2814  int i;
2815  Klass* klass = java_lang_Class::as_Klass(java_class);
2816
2817  if (klass->oop_is_instance()) {
2818    InstanceKlass* ik = InstanceKlass::cast(klass);
2819
2820    // ignore the class if it hasn't been linked yet
2821    if (!ik->is_linked()) {
2822      return true;
2823    }
2824
2825    // get the java mirror
2826    oop mirror = klass->java_mirror();
2827
2828    // super (only if something more interesting than java.lang.Object)
2829    Klass* java_super = ik->java_super();
2830    if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
2831      oop super = java_super->java_mirror();
2832      if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
2833        return false;
2834      }
2835    }
2836
2837    // class loader
2838    oop cl = ik->class_loader();
2839    if (cl != NULL) {
2840      if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
2841        return false;
2842      }
2843    }
2844
2845    // protection domain
2846    oop pd = ik->protection_domain();
2847    if (pd != NULL) {
2848      if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
2849        return false;
2850      }
2851    }
2852
2853    // signers
2854    oop signers = ik->signers();
2855    if (signers != NULL) {
2856      if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
2857        return false;
2858      }
2859    }
2860
2861    // references from the constant pool
2862    {
2863      ConstantPool* pool = ik->constants();
2864      for (int i = 1; i < pool->length(); i++) {
2865        constantTag tag = pool->tag_at(i).value();
2866        if (tag.is_string() || tag.is_klass()) {
2867          oop entry;
2868          if (tag.is_string()) {
2869            entry = pool->resolved_string_at(i);
2870            // If the entry is non-null it is resolved.
2871            if (entry == NULL) continue;
2872          } else {
2873            entry = pool->resolved_klass_at(i)->java_mirror();
2874          }
2875          if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
2876            return false;
2877          }
2878        }
2879      }
2880    }
2881
2882    // interfaces
2883    // (These will already have been reported as references from the constant pool
2884    //  but are specified by IterateOverReachableObjects and must be reported).
2885    Array<Klass*>* interfaces = ik->local_interfaces();
2886    for (i = 0; i < interfaces->length(); i++) {
2887      oop interf = ((Klass*)interfaces->at(i))->java_mirror();
2888      if (interf == NULL) {
2889        continue;
2890      }
2891      if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
2892        return false;
2893      }
2894    }
2895
2896    // iterate over the static fields
2897
2898    ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
2899    for (i=0; i<field_map->field_count(); i++) {
2900      ClassFieldDescriptor* field = field_map->field_at(i);
2901      char type = field->field_type();
2902      if (!is_primitive_field_type(type)) {
2903        oop fld_o = mirror->obj_field(field->field_offset());
2904        assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
2905        if (fld_o != NULL) {
2906          int slot = field->field_index();
2907          if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
2908            delete field_map;
2909            return false;
2910          }
2911        }
2912      } else {
2913        if (is_reporting_primitive_fields()) {
2914          address addr = (address)mirror + field->field_offset();
2915          int slot = field->field_index();
2916          if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
2917            delete field_map;
2918            return false;
2919          }
2920        }
2921      }
2922    }
2923    delete field_map;
2924
2925    return true;
2926  }
2927
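  // non-instance classes (e.g. array classes) have nothing further to report here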
2928  return true;
2929}
2930
2931// an object references a class and its instance fields
2932// (static fields are ignored here as we report these as
2933// references from the class).
2934inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
2935  // reference to the class
2936  if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
2937    return false;
2938  }
2939
2940  // iterate over instance fields
2941  ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
2942  for (int i=0; i<field_map->field_count(); i++) {
2943    ClassFieldDescriptor* field = field_map->field_at(i);
2944    char type = field->field_type();
2945    if (!is_primitive_field_type(type)) {
2946      oop fld_o = o->obj_field(field->field_offset());
2947      // ignore any objects that aren't visible to profiler
2948      if (fld_o != NULL && ServiceUtil::visible_oop(fld_o)) {
2949        assert(Universe::heap()->is_in_reserved(fld_o), "unsafe code should not "
2950               "have references to Klass* anymore");
2951        int slot = field->field_index();
2952        if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
2953          return false;
2954        }
2955      }
2956    } else {
2957      if (is_reporting_primitive_fields()) {
2958        // primitive instance field
2959        address addr = (address)o + field->field_offset();
2960        int slot = field->field_index();
2961        if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
2962          return false;
2963        }
2964      }
2965    }
2966  }
2967
2968  // if the object is a java.lang.String
2969  if (is_reporting_string_values() &&
2970      o->klass() == SystemDictionary::String_klass()) {
2971    if (!CallbackInvoker::report_string_value(o)) {
2972      return false;
2973    }
2974  }
2975  return true;
2976}
2977
2978
2979// Collects all simple (non-stack) roots except for threads;
2980// threads are handled in collect_stack_roots() as an optimization.
2981// If a heap root callback is provided then the callback is
2982// invoked for each simple root.
2983// If an object reference callback is provided then all simple
2984// roots are pushed onto the marking stack so that they can be
2985// processed later.
2986//
2987inline bool VM_HeapWalkOperation::collect_simple_roots() {
2988  SimpleRootsClosure blk;
2989
2990  // JNI globals
2991  blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
2992  JNIHandles::oops_do(&blk);
2993  if (blk.stopped()) {
2994    return false;
2995  }
2996
2997  // Preloaded classes and loader from the system dictionary
2998  blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
2999  SystemDictionary::always_strong_oops_do(&blk);
3000  KlassToOopClosure klass_blk(&blk);
3001  ClassLoaderDataGraph::always_strong_oops_do(&blk, &klass_blk, false);
3002  if (blk.stopped()) {
3003    return false;
3004  }
3005
3006  // Inflated monitors
3007  blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
3008  ObjectSynchronizer::oops_do(&blk);
3009  if (blk.stopped()) {
3010    return false;
3011  }
3012
3013  // threads are now handled in collect_stack_roots()
3014
3015  // Other kinds of roots maintained by HotSpot
3016  // Many of these won't be visible but others (such as instances of important
3017  // exceptions) will be visible.
3018  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3019  Universe::oops_do(&blk);
3020
3021  // If there are any non-perm roots in the code cache, visit them.
3022  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3023  CodeBlobToOopClosure look_in_blobs(&blk, !CodeBlobToOopClosure::FixRelocations);
3024  CodeCache::scavenge_root_nmethods_do(&look_in_blobs);
3025
3026  return true;
3027}
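
// Agent-side view (illustrative sketch, not part of the VM): with the older
// IterateOverReachableObjects API, the roots collected above are presented to
// the heap root callback with kinds such as JVMTI_HEAP_ROOT_JNI_GLOBAL,
// JVMTI_HEAP_ROOT_SYSTEM_CLASS, JVMTI_HEAP_ROOT_MONITOR and
// JVMTI_HEAP_ROOT_OTHER. The callback name and the tag value are arbitrary:
//
//   static jvmtiIterationControl JNICALL root_cb(jvmtiHeapRootKind root_kind,
//                                                jlong class_tag, jlong size,
//                                                jlong* tag_ptr,
//                                                void* user_data) {
//     if (root_kind == JVMTI_HEAP_ROOT_JNI_GLOBAL) {
//       *tag_ptr = 1;                    // tag roots we care about
//     }
//     return JVMTI_ITERATION_CONTINUE;   // keep walking from this root
//   }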
3028
3029// Walk the stack of a given thread and find all references (locals
3030// and JNI locals) and report these as stack references.
3031inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
3032                                                      JNILocalRootsClosure* blk)
3033{
3034  oop threadObj = java_thread->threadObj();
3035  assert(threadObj != NULL, "sanity check");
3036
3037  // only need to get the thread's tag once per thread
3038  jlong thread_tag = tag_for(_tag_map, threadObj);
3039
3040  // also need the thread id
3041  jlong tid = java_lang_Thread::thread_id(threadObj);
3042
3043
3044  if (java_thread->has_last_Java_frame()) {
3045
3046    // vframes are resource allocated
3047    Thread* current_thread = Thread::current();
3048    ResourceMark rm(current_thread);
3049    HandleMark hm(current_thread);
3050
3051    RegisterMap reg_map(java_thread);
3052    frame f = java_thread->last_frame();
3053    vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);
3054
3055    bool is_top_frame = true;
3056    int depth = 0;
3057    frame* last_entry_frame = NULL;
3058
3059    while (vf != NULL) {
3060      if (vf->is_java_frame()) {
3061
3062        // java frame (interpreted, compiled, ...)
3063        javaVFrame *jvf = javaVFrame::cast(vf);
3064
3065        // the jmethodID
3066        jmethodID method = jvf->method()->jmethod_id();
3067
3068        if (!(jvf->method()->is_native())) {
3069          jlocation bci = (jlocation)jvf->bci();
3070          StackValueCollection* locals = jvf->locals();
3071          for (int slot=0; slot<locals->size(); slot++) {
3072            if (locals->at(slot)->type() == T_OBJECT) {
3073              oop o = locals->obj_at(slot)();
3074              if (o == NULL) {
3075                continue;
3076              }
3077
3078              // stack reference
3079              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
3080                                                   bci, slot, o)) {
3081                return false;
3082              }
3083            }
3084          }
3085
3086          StackValueCollection* exprs = jvf->expressions();
3087          for (int index=0; index < exprs->size(); index++) {
3088            if (exprs->at(index)->type() == T_OBJECT) {
3089              oop o = exprs->obj_at(index)();
3090              if (o == NULL) {
3091                continue;
3092              }
3093
3094              // stack reference
3095              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
3096                                                   bci, locals->size() + index, o)) {
3097                return false;
3098              }
3099            }
3100          }
3101
3102        } else {
3103          blk->set_context(thread_tag, tid, depth, method);
3104          if (is_top_frame) {
3105            // JNI locals for the top frame.
3106            java_thread->active_handles()->oops_do(blk);
3107          } else {
3108            if (last_entry_frame != NULL) {
3109              // JNI locals for the entry frame
3110              assert(last_entry_frame->is_entry_frame(), "checking");
3111              last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
3112            }
3113          }
3114        }
3115        last_entry_frame = NULL;
3116        depth++;
3117      } else {
3118        // externalVFrame - if this is an entry frame then we report the JNI locals
3119        // when we find the corresponding javaVFrame
3120        frame* fr = vf->frame_pointer();
3121        assert(fr != NULL, "sanity check");
3122        if (fr->is_entry_frame()) {
3123          last_entry_frame = fr;
3124        }
3125      }
3126
3127      vf = vf->sender();
3128      is_top_frame = false;
3129    }
3130  } else {
3131    // no last java frame but there may be JNI locals
3132    blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
3133    java_thread->active_handles()->oops_do(blk);
3134  }
3135  return true;
3136}
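
// Agent-side view (illustrative sketch, not part of the VM): each local or
// expression-stack slot reported by report_stack_ref_root() above reaches a
// FollowReferences agent as a JVMTI_HEAP_REFERENCE_STACK_LOCAL reference, and
// the JNI locals reported via the JNILocalRootsClosure arrive as
// JVMTI_HEAP_REFERENCE_JNI_LOCAL. The reference_info union carries the values
// gathered in this function. The callback name is arbitrary:
//
//   static jint JNICALL stack_ref_cb(jvmtiHeapReferenceKind kind,
//                                    const jvmtiHeapReferenceInfo* info,
//                                    jlong class_tag, jlong referrer_class_tag,
//                                    jlong size, jlong* tag_ptr,
//                                    jlong* referrer_tag_ptr, jint length,
//                                    void* user_data) {
//     if (kind == JVMTI_HEAP_REFERENCE_STACK_LOCAL) {
//       const jvmtiHeapReferenceInfoStackLocal* sl = &info->stack_local;
//       // sl->thread_tag, sl->thread_id, sl->depth, sl->method,
//       // sl->location and sl->slot correspond to the thread tag, thread id,
//       // depth, jmethodID, bci and slot passed to report_stack_ref_root()
//     } else if (kind == JVMTI_HEAP_REFERENCE_JNI_LOCAL) {
//       const jvmtiHeapReferenceInfoJniLocal* jl = &info->jni_local;
//       // jl->thread_tag, jl->thread_id, jl->depth and jl->method identify
//       // the frame holding the JNI local reference
//     }
//     return JVMTI_VISIT_OBJECTS;
//   }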
3137
3138
3139// Collects the simple root for each thread and collects all
3140// stack roots - for each thread it walks the execution
3141// stack to find all references and JNI local refs.
3142inline bool VM_HeapWalkOperation::collect_stack_roots() {
3143  JNILocalRootsClosure blk;
3144  for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
3145    oop threadObj = thread->threadObj();
3146    if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
3147      // Collect the simple root for this thread before we
3148      // collect its stack roots
3149      if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
3150                                               threadObj)) {
3151        return false;
3152      }
3153      if (!collect_stack_roots(thread, &blk)) {
3154        return false;
3155      }
3156    }
3157  }
3158  return true;
3159}
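
// Agent-side note (illustrative sketch, not part of the VM): the per-thread
// simple root reported above arrives as a JVMTI_HEAP_REFERENCE_THREAD (or
// JVMTI_HEAP_ROOT_THREAD with the older API). Tagging the thread objects up
// front makes the thread_tag in later stack-reference reports meaningful;
// 'jvmti' is assumed to be the agent's jvmtiEnv* and the tag values are
// arbitrary:
//
//   jint thread_count = 0;
//   jthread* threads = NULL;
//   if (jvmti->GetAllThreads(&thread_count, &threads) == JVMTI_ERROR_NONE) {
//     for (jint i = 0; i < thread_count; i++) {
//       jvmti->SetTag(threads[i], 1000 + i);    // requires can_tag_objects
//     }
//     jvmti->Deallocate((unsigned char*)threads);
//   }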
3160
3161// Visit an object:
3162// first, mark the object as visited;
3163// second, get all the outbound references from this object (in other words, all
3164// the objects referenced by this object).
3165//
3166bool VM_HeapWalkOperation::visit(oop o) {
3167  // mark object as visited
3168  assert(!ObjectMarker::visited(o), "can't visit same object more than once");
3169  ObjectMarker::mark(o);
3170
3171  // instance
3172  if (o->is_instance()) {
3173    if (o->klass() == SystemDictionary::Class_klass()) {
3174      if (!java_lang_Class::is_primitive(o)) {
3175        // a java.lang.Class
3176        return iterate_over_class(o);
3177      }
3178    } else {
3179      return iterate_over_object(o);
3180    }
3181  }
3182
3183  // object array
3184  if (o->is_objArray()) {
3185    return iterate_over_array(o);
3186  }
3187
3188  // type array
3189  if (o->is_typeArray()) {
3190    return iterate_over_type_array(o);
3191  }
3192
3193  return true;
3194}
3195
3196void VM_HeapWalkOperation::doit() {
3197  ResourceMark rm;
3198  ObjectMarkerController marker;
3199  ClassFieldMapCacheMark cm;
3200
3201  assert(visit_stack()->is_empty(), "visit stack must be empty");
3202
3203  // the heap walk starts with an initial object or the heap roots
3204  if (initial_object().is_null()) {
3205    // If either collect_stack_roots() or collect_simple_roots()
3206    // returns false at this point, then there are no mark bits
3207    // to reset.
3208    ObjectMarker::set_needs_reset(false);
3209
3210    // Calling collect_stack_roots() before collect_simple_roots()
3211    // can result in a big performance boost for an agent that is
3212    // focused on analyzing references in the thread stacks.
3213    if (!collect_stack_roots()) return;
3214
3215    if (!collect_simple_roots()) return;
3216
3217    // no early return so enable heap traversal to reset the mark bits
3218    ObjectMarker::set_needs_reset(true);
3219  } else {
3220    visit_stack()->push(initial_object()());
3221  }
3222
3223  // object references required
3224  if (is_following_references()) {
3225
3226    // visit each object until all reachable objects have been
3227    // visited or the callback asked to terminate the iteration.
3228    while (!visit_stack()->is_empty()) {
3229      oop o = visit_stack()->pop();
3230      if (!ObjectMarker::visited(o)) {
3231        if (!visit(o)) {
3232          break;
3233        }
3234      }
3235    }
3236  }
3237}
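
// The walk above is a standard iterative marking traversal: seed the visit
// stack (with the roots or the initial object), then repeatedly pop an
// object, mark it, and push its outbound references, stopping early if a
// callback aborts. A simplified, standalone C++ sketch of that strategy
// (plain data structures, not the VM's ObjectMarker or visit stack):
//
//   #include <stack>
//   #include <unordered_set>
//   #include <vector>
//
//   struct Node { std::vector<Node*> refs; };
//
//   // 'visit' returns false to terminate early, mirroring the
//   // callback-driven termination in VM_HeapWalkOperation::doit().
//   template <typename Visit>
//   void walk(const std::vector<Node*>& roots, Visit visit) {
//     std::unordered_set<Node*> marked;
//     std::stack<Node*> pending;
//     for (size_t i = 0; i < roots.size(); i++) pending.push(roots[i]);
//     while (!pending.empty()) {
//       Node* n = pending.top(); pending.pop();
//       if (n == NULL || !marked.insert(n).second) continue;  // already seen
//       if (!visit(n)) break;                                 // early stop
//       for (size_t j = 0; j < n->refs.size(); j++) pending.push(n->refs[j]);
//     }
//   }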
3238
3239// iterate over all objects that are reachable from a set of roots
3240void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
3241                                                 jvmtiStackReferenceCallback stack_ref_callback,
3242                                                 jvmtiObjectReferenceCallback object_ref_callback,
3243                                                 const void* user_data) {
3244  MutexLocker ml(Heap_lock);
3245  BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
3246  VM_HeapWalkOperation op(this, Handle(), context, user_data);
3247  VMThread::execute(&op);
3248}
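
// Agent-side usage (illustrative sketch, not part of the VM): an agent drives
// this operation via jvmtiEnv::IterateOverReachableObjects; unused callbacks
// may be NULL, and the object-rooted variant below is reached through
// IterateOverObjectsReachableFromObject. 'jvmti' and 'obj_ref_cb' (a
// jvmtiObjectReferenceCallback, not shown) are assumptions of the example:
//
//   jvmtiError err = jvmti->IterateOverReachableObjects(
//       root_cb,            // heap root callback (sketched earlier)
//       NULL,               // no stack reference callback
//       obj_ref_cb,         // object reference callback
//       NULL);              // user_data
//   if (err != JVMTI_ERROR_NONE) {
//     // the heap functions require the can_tag_objects capability
//   }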
3249
3250// iterate over all objects that are reachable from a given object
3251void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
3252                                                             jvmtiObjectReferenceCallback object_ref_callback,
3253                                                             const void* user_data) {
3254  oop obj = JNIHandles::resolve(object);
3255  Handle initial_object(Thread::current(), obj);
3256
3257  MutexLocker ml(Heap_lock);
3258  BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
3259  VM_HeapWalkOperation op(this, initial_object, context, user_data);
3260  VMThread::execute(&op);
3261}
3262
3263// follow references from an initial object or the GC roots
3264void JvmtiTagMap::follow_references(jint heap_filter,
3265                                    KlassHandle klass,
3266                                    jobject object,
3267                                    const jvmtiHeapCallbacks* callbacks,
3268                                    const void* user_data)
3269{
3270  oop obj = JNIHandles::resolve(object);
3271  Handle initial_object(Thread::current(), obj);
3272
3273  MutexLocker ml(Heap_lock);
3274  AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
3275  VM_HeapWalkOperation op(this, initial_object, context, user_data);
3276  VMThread::execute(&op);
3277}
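
// Agent-side usage (illustrative sketch, not part of the VM): the modern
// FollowReferences entry point accepts a heap_filter bit mask (e.g.
// JVMTI_HEAP_FILTER_UNTAGGED), an optional jclass to restrict reporting to
// instances of one class, and an optional initial object. 'jvmti' and
// 'heap_ref_cb' are assumptions of the example; memset is from <string.h>:
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = heap_ref_cb;   // sketched earlier
//   jvmtiError err = jvmti->FollowReferences(
//       0,            // heap_filter: report everything
//       NULL,         // klass: no class filter
//       NULL,         // initial_object: start at the heap roots
//       &callbacks,
//       NULL);        // user_data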
3278
3279
3280void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
3281  // No locks during VM bring-up (0 threads) and no safepoints after main
3282  // thread creation and before VMThread creation (1 thread); initial GC
3283  // verification can happen in that window which gets to here.
3284  assert(Threads::number_of_threads() <= 1 ||
3285         SafepointSynchronize::is_at_safepoint(),
3286         "must be executed at a safepoint");
3287  if (JvmtiEnv::environments_might_exist()) {
3288    JvmtiEnvIterator it;
3289    for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
3290      JvmtiTagMap* tag_map = env->tag_map();
3291      if (tag_map != NULL && !tag_map->is_empty()) {
3292        tag_map->do_weak_oops(is_alive, f);
3293      }
3294    }
3295  }
3296}
3297
3298void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) {
3299
3300  // does this environment have the OBJECT_FREE event enabled?
3301  bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
3302
3303  // counters used for trace message
3304  int freed = 0;
3305  int moved = 0;
3306
3307  JvmtiTagHashmap* hashmap = this->hashmap();
3308
3309  // re-enable resizing (if it was disabled)
3310  hashmap->set_resizing_enabled(true);
3311
3312  // if the hashmap is empty then we can skip it
3313  if (hashmap->_entry_count == 0) {
3314    return;
3315  }
3316
3317  // now iterate through each entry in the table
3318
3319  JvmtiTagHashmapEntry** table = hashmap->table();
3320  int size = hashmap->size();
3321
3322  JvmtiTagHashmapEntry* delayed_add = NULL;
3323
3324  for (int pos = 0; pos < size; ++pos) {
3325    JvmtiTagHashmapEntry* entry = table[pos];
3326    JvmtiTagHashmapEntry* prev = NULL;
3327
3328    while (entry != NULL) {
3329      JvmtiTagHashmapEntry* next = entry->next();
3330
3331      oop* obj = entry->object_addr();
3332
3333      // has the object been GC'ed?
3334      if (!is_alive->do_object_b(entry->object())) {
3335        // grab the tag
3336        jlong tag = entry->tag();
3337        guarantee(tag != 0, "checking");
3338
3339        // remove GC'ed entry from hashmap and return the
3340        // entry to the free list
3341        hashmap->remove(prev, pos, entry);
3342        destroy_entry(entry);
3343
3344        // post the event to the profiler
3345        if (post_object_free) {
3346          JvmtiExport::post_object_free(env(), tag);
3347        }
3348
3349        ++freed;
3350      } else {
3351        f->do_oop(entry->object_addr());
3352        oop new_oop = entry->object();
3353
3354        // if the object has moved then re-hash it and move its
3355        // entry to its new location.
3356        unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size);
3357        if (new_pos != (unsigned int)pos) {
3358          if (prev == NULL) {
3359            table[pos] = next;
3360          } else {
3361            prev->set_next(next);
3362          }
3363          if (new_pos < (unsigned int)pos) {
3364            entry->set_next(table[new_pos]);
3365            table[new_pos] = entry;
3366          } else {
3367            // Delay adding this entry to its new position as we'd end up
3368            // hitting it again during this iteration.
3369            entry->set_next(delayed_add);
3370            delayed_add = entry;
3371          }
3372          moved++;
3373        } else {
3374          // object didn't move
3375          prev = entry;
3376        }
3377      }
3378
3379      entry = next;
3380    }
3381  }
3382
3383  // Re-add all the entries which were kept aside
3384  while (delayed_add != NULL) {
3385    JvmtiTagHashmapEntry* next = delayed_add->next();
3386    unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object(), size);
3387    delayed_add->set_next(table[pos]);
3388    table[pos] = delayed_add;
3389    delayed_add = next;
3390  }
3391
3392  // stats
3393  if (TraceJVMTIObjectTagging) {
3394    int post_total = hashmap->_entry_count;
3395    int pre_total = post_total + freed;
3396
3397    tty->print_cr("(%d->%d, %d freed, %d total moves)",
3398        pre_total, post_total, freed, moved);
3399  }
3400}
3401
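// Agent-side view (illustrative sketch, not part of the VM): the OBJECT_FREE
// events posted above are only delivered if the agent has requested the
// capability and enabled the event; the callback receives just the tag since
// the object itself has already been reclaimed. 'jvmti' is assumed to be the
// agent's jvmtiEnv*, typically set up in Agent_OnLoad; memset is from <string.h>:
//
//   static void JNICALL object_free_cb(jvmtiEnv* jvmti_env, jlong tag) {
//     // record that the object tagged 'tag' has been garbage collected
//   }
//
//   // during agent initialization:
//   jvmtiCapabilities caps;
//   memset(&caps, 0, sizeof(caps));
//   caps.can_tag_objects = 1;
//   caps.can_generate_object_free_events = 1;
//   jvmti->AddCapabilities(&caps);
//
//   jvmtiEventCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.ObjectFree = &object_free_cb;
//   jvmti->SetEventCallbacks(&callbacks, (jint)sizeof(callbacks));
//   jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_OBJECT_FREE, NULL);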