1/*
2 * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "classfile/javaClasses.inline.hpp"
27#include "classfile/symbolTable.hpp"
28#include "classfile/systemDictionary.hpp"
29#include "classfile/vmSymbols.hpp"
30#include "code/codeCache.hpp"
31#include "jvmtifiles/jvmtiEnv.hpp"
32#include "memory/resourceArea.hpp"
33#include "oops/instanceMirrorKlass.hpp"
34#include "oops/objArrayKlass.hpp"
35#include "oops/objArrayOop.inline.hpp"
36#include "oops/oop.inline.hpp"
37#include "prims/jvmtiEventController.hpp"
38#include "prims/jvmtiEventController.inline.hpp"
39#include "prims/jvmtiExport.hpp"
40#include "prims/jvmtiImpl.hpp"
41#include "prims/jvmtiTagMap.hpp"
42#include "runtime/biasedLocking.hpp"
43#include "runtime/javaCalls.hpp"
44#include "runtime/jniHandles.hpp"
45#include "runtime/mutex.hpp"
46#include "runtime/mutexLocker.hpp"
47#include "runtime/reflectionUtils.hpp"
48#include "runtime/vframe.hpp"
49#include "runtime/vmThread.hpp"
50#include "runtime/vm_operations.hpp"
51#include "services/serviceUtil.hpp"
52#include "utilities/macros.hpp"
53#if INCLUDE_ALL_GCS
54#include "gc/g1/g1SATBCardTableModRefBS.hpp"
55#include "gc/parallel/parallelScavengeHeap.hpp"
56#endif // INCLUDE_ALL_GCS
57
58// JvmtiTagHashmapEntry
59//
60// Each entry encapsulates a reference to the tagged object
// and the tag value. In addition, an entry includes a next pointer which
62// is used to chain entries together.
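//
// Note that an entry stores the oop directly; the tag map treats these
// references as implicitly weak (see the SATB comment in
// TagObjectCollector::do_entry below).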
63
64class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
65 private:
66  friend class JvmtiTagMap;
67
68  oop _object;                          // tagged object
69  jlong _tag;                           // the tag
70  JvmtiTagHashmapEntry* _next;          // next on the list
71
72  inline void init(oop object, jlong tag) {
73    _object = object;
74    _tag = tag;
75    _next = NULL;
76  }
77
78  // constructor
79  JvmtiTagHashmapEntry(oop object, jlong tag)         { init(object, tag); }
80
81 public:
82
83  // accessor methods
84  inline oop object() const                           { return _object; }
85  inline oop* object_addr()                           { return &_object; }
86  inline jlong tag() const                            { return _tag; }
87
88  inline void set_tag(jlong tag) {
89    assert(tag != 0, "can't be zero");
90    _tag = tag;
91  }
92
93  inline JvmtiTagHashmapEntry* next() const             { return _next; }
94  inline void set_next(JvmtiTagHashmapEntry* next)      { _next = next; }
95};
96
97
98// JvmtiTagHashmap
99//
100// A hashmap is essentially a table of pointers to entries. Entries
101// are hashed to a location, or position in the table, and then
102// chained from that location. The "key" for hashing is address of
103// the object, or oop. The "value" is the tag value.
104//
// A hashmap maintains a count of the number of entries in the hashmap
106// and resizes if the number of entries exceeds a given threshold.
// The threshold is specified as a fraction of the size - for
// example a threshold of 0.75 will trigger the hashmap to resize
// if the number of entries exceeds 75% of the table size.
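// With the default load factor of 4.0 and the initial table size of 4801,
// for example, a resize is attempted once the map holds more than
// 4.0 * 4801 = 19204 entries.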
110//
111// A hashmap provides functions for adding, removing, and finding
112// entries. It also provides a function to iterate over all entries
113// in the hashmap.
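//
// An illustrative sketch of how JvmtiTagMap (a friend class) drives the
// hashmap when tagging an object - entries are obtained from
// JvmtiTagMap::create_entry() as the entry constructor is private:
//
//   JvmtiTagHashmapEntry* entry = hashmap->find(o);    // NULL if untagged
//   if (entry == NULL) {
//     if (tag != 0) hashmap->add(o, create_entry(o, tag));   // tag it
//   } else if (tag == 0) {
//     hashmap->remove(o);       // untag (the entry is then recycled
//                               // via JvmtiTagMap::destroy_entry)
//   } else {
//     entry->set_tag(tag);      // update the tag
//   }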
114
115class JvmtiTagHashmap : public CHeapObj<mtInternal> {
116 private:
117  friend class JvmtiTagMap;
118
119  enum {
120    small_trace_threshold  = 10000,                  // threshold for tracing
121    medium_trace_threshold = 100000,
122    large_trace_threshold  = 1000000,
123    initial_trace_threshold = small_trace_threshold
124  };
125
126  static int _sizes[];                  // array of possible hashmap sizes
127  int _size;                            // actual size of the table
128  int _size_index;                      // index into size table
129
130  int _entry_count;                     // number of entries in the hashmap
131
  float _load_factor;                   // load factor - resize when entry count exceeds _load_factor * _size
133  int _resize_threshold;                // computed threshold to trigger resizing.
134  bool _resizing_enabled;               // indicates if hashmap can resize
135
136  int _trace_threshold;                 // threshold for trace messages
137
138  JvmtiTagHashmapEntry** _table;        // the table of entries.
139
140  // private accessors
141  int resize_threshold() const                  { return _resize_threshold; }
142  int trace_threshold() const                   { return _trace_threshold; }
143
144  // initialize the hashmap
145  void init(int size_index=0, float load_factor=4.0f) {
146    int initial_size =  _sizes[size_index];
147    _size_index = size_index;
148    _size = initial_size;
149    _entry_count = 0;
150    _trace_threshold = initial_trace_threshold;
151    _load_factor = load_factor;
152    _resize_threshold = (int)(_load_factor * _size);
153    _resizing_enabled = true;
154    size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
155    _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
156    if (_table == NULL) {
157      vm_exit_out_of_memory(s, OOM_MALLOC_ERROR,
158        "unable to allocate initial hashtable for jvmti object tags");
159    }
160    for (int i=0; i<initial_size; i++) {
161      _table[i] = NULL;
162    }
163  }
164
165  // hash a given key (oop) with the specified size
166  static unsigned int hash(oop key, int size) {
167    // shift right to get better distribution (as these bits will be zero
168    // with aligned addresses)
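    // (the shift assumes at least 8-byte object alignment on LP64 and at
    // least 4-byte alignment on 32-bit platforms)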
169    unsigned int addr = (unsigned int)(cast_from_oop<intptr_t>(key));
170#ifdef _LP64
171    return (addr >> 3) % size;
172#else
173    return (addr >> 2) % size;
174#endif
175  }
176
177  // hash a given key (oop)
178  unsigned int hash(oop key) {
179    return hash(key, _size);
180  }
181
  // resize the hashmap - allocates a larger table and re-hashes
183  // all entries into the new table.
184  void resize() {
185    int new_size_index = _size_index+1;
186    int new_size = _sizes[new_size_index];
187    if (new_size < 0) {
188      // hashmap already at maximum capacity
189      return;
190    }
191
192    // allocate new table
193    size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
194    JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
195    if (new_table == NULL) {
196      warning("unable to allocate larger hashtable for jvmti object tags");
197      set_resizing_enabled(false);
198      return;
199    }
200
201    // initialize new table
202    int i;
203    for (i=0; i<new_size; i++) {
204      new_table[i] = NULL;
205    }
206
207    // rehash all entries into the new table
208    for (i=0; i<_size; i++) {
209      JvmtiTagHashmapEntry* entry = _table[i];
210      while (entry != NULL) {
211        JvmtiTagHashmapEntry* next = entry->next();
212        oop key = entry->object();
213        assert(key != NULL, "jni weak reference cleared!!");
214        unsigned int h = hash(key, new_size);
215        JvmtiTagHashmapEntry* anchor = new_table[h];
216        if (anchor == NULL) {
217          new_table[h] = entry;
218          entry->set_next(NULL);
219        } else {
220          entry->set_next(anchor);
221          new_table[h] = entry;
222        }
223        entry = next;
224      }
225    }
226
227    // free old table and update settings.
228    os::free((void*)_table);
229    _table = new_table;
230    _size_index = new_size_index;
231    _size = new_size;
232
233    // compute new resize threshold
234    _resize_threshold = (int)(_load_factor * _size);
235  }
236
237
238  // internal remove function - remove an entry at a given position in the
239  // table.
240  inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
241    assert(pos >= 0 && pos < _size, "out of range");
242    if (prev == NULL) {
243      _table[pos] = entry->next();
244    } else {
245      prev->set_next(entry->next());
246    }
247    assert(_entry_count > 0, "checking");
248    _entry_count--;
249  }
250
251  // resizing switch
252  bool is_resizing_enabled() const          { return _resizing_enabled; }
253  void set_resizing_enabled(bool enable)    { _resizing_enabled = enable; }
254
255  // debugging
256  void print_memory_usage();
257  void compute_next_trace_threshold();
258
259 public:
260
261  // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
// The preferred size is rounded up to the nearest actual size (and clamped at the largest size).
263  JvmtiTagHashmap(int size, float load_factor=0.0f) {
264    int i=0;
265    while (_sizes[i] < size) {
266      if (_sizes[i] < 0) {
267        assert(i > 0, "sanity check");
268        i--;
269        break;
270      }
271      i++;
272    }
273
274    // if a load factor is specified then use it, otherwise use default
275    if (load_factor > 0.01f) {
276      init(i, load_factor);
277    } else {
278      init(i);
279    }
280  }
281
282  // create a JvmtiTagHashmap with default settings
283  JvmtiTagHashmap() {
284    init();
285  }
286
287  // release table when JvmtiTagHashmap destroyed
288  ~JvmtiTagHashmap() {
289    if (_table != NULL) {
290      os::free((void*)_table);
291      _table = NULL;
292    }
293  }
294
295  // accessors
296  int size() const                              { return _size; }
297  JvmtiTagHashmapEntry** table() const          { return _table; }
298  int entry_count() const                       { return _entry_count; }
299
300  // find an entry in the hashmap, returns NULL if not found.
301  inline JvmtiTagHashmapEntry* find(oop key) {
302    unsigned int h = hash(key);
303    JvmtiTagHashmapEntry* entry = _table[h];
304    while (entry != NULL) {
305      if (entry->object() == key) {
306         return entry;
307      }
308      entry = entry->next();
309    }
310    return NULL;
311  }
312
313
314  // add a new entry to hashmap
315  inline void add(oop key, JvmtiTagHashmapEntry* entry) {
316    assert(key != NULL, "checking");
317    assert(find(key) == NULL, "duplicate detected");
318    unsigned int h = hash(key);
319    JvmtiTagHashmapEntry* anchor = _table[h];
320    if (anchor == NULL) {
321      _table[h] = entry;
322      entry->set_next(NULL);
323    } else {
324      entry->set_next(anchor);
325      _table[h] = entry;
326    }
327
328    _entry_count++;
329    if (log_is_enabled(Debug, jvmti, objecttagging) && entry_count() >= trace_threshold()) {
330      print_memory_usage();
331      compute_next_trace_threshold();
332    }
333
    // if the number of entries exceeds the threshold then resize
335    if (entry_count() > resize_threshold() && is_resizing_enabled()) {
336      resize();
337    }
338  }
339
340  // remove an entry with the given key.
341  inline JvmtiTagHashmapEntry* remove(oop key) {
342    unsigned int h = hash(key);
343    JvmtiTagHashmapEntry* entry = _table[h];
344    JvmtiTagHashmapEntry* prev = NULL;
345    while (entry != NULL) {
346      if (key == entry->object()) {
347        break;
348      }
349      prev = entry;
350      entry = entry->next();
351    }
352    if (entry != NULL) {
353      remove(prev, h, entry);
354    }
355    return entry;
356  }
357
358  // iterate over all entries in the hashmap
359  void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
360};
361
362// possible hashmap sizes - odd primes that roughly double in size.
363// To avoid excessive resizing the odd primes from 4801-76831 and
364// 76831-307261 have been removed. The list must be terminated by -1.
365int JvmtiTagHashmap::_sizes[] =  { 4801, 76831, 307261, 614563, 1228891,
366    2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
367
368
// A supporting class for iterating over all entries in the hashmap
370class JvmtiTagHashmapEntryClosure {
371 public:
372  virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
373};
374
375
376// iterate over all entries in the hashmap
377void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
378  for (int i=0; i<_size; i++) {
379    JvmtiTagHashmapEntry* entry = _table[i];
380    JvmtiTagHashmapEntry* prev = NULL;
381    while (entry != NULL) {
382      // obtain the next entry before invoking do_entry - this is
383      // necessary because do_entry may remove the entry from the
384      // hashmap.
385      JvmtiTagHashmapEntry* next = entry->next();
386      closure->do_entry(entry);
387      entry = next;
388     }
389  }
390}
391
392// debugging
393void JvmtiTagHashmap::print_memory_usage() {
394  intptr_t p = (intptr_t)this;
395  tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
396
397  // table + entries in KB
398  int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
399    entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
400
401  int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
402  tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
403    entry_count(), hashmap_usage, weak_globals_usage);
404}
405
406// compute threshold for the next trace message
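// (when jvmti+objecttagging logging is enabled this yields a trace message
// roughly every 10000 entries below 100000 entries, every 100000 entries
// below 1000000 entries, and every 1000000 entries beyond that)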
407void JvmtiTagHashmap::compute_next_trace_threshold() {
408  _trace_threshold = entry_count();
409  if (trace_threshold() < medium_trace_threshold) {
410    _trace_threshold += small_trace_threshold;
411  } else {
412    if (trace_threshold() < large_trace_threshold) {
413      _trace_threshold += medium_trace_threshold;
414    } else {
415      _trace_threshold += large_trace_threshold;
416    }
417  }
418}
419
420// create a JvmtiTagMap
421JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
422  _env(env),
423  _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
424  _free_entries(NULL),
425  _free_entries_count(0)
426{
427  assert(JvmtiThreadState_lock->is_locked(), "sanity check");
428  assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");
429
430  _hashmap = new JvmtiTagHashmap();
431
432  // finally add us to the environment
433  ((JvmtiEnvBase *)env)->set_tag_map(this);
434}
435
436
437// destroy a JvmtiTagMap
438JvmtiTagMap::~JvmtiTagMap() {
439
440  // no lock acquired as we assume the enclosing environment is
  // also being destroyed.
442  ((JvmtiEnvBase *)_env)->set_tag_map(NULL);
443
444  JvmtiTagHashmapEntry** table = _hashmap->table();
445  for (int j = 0; j < _hashmap->size(); j++) {
446    JvmtiTagHashmapEntry* entry = table[j];
447    while (entry != NULL) {
448      JvmtiTagHashmapEntry* next = entry->next();
449      delete entry;
450      entry = next;
451    }
452  }
453
454  // finally destroy the hashmap
455  delete _hashmap;
456  _hashmap = NULL;
457
458  // remove any entries on the free list
459  JvmtiTagHashmapEntry* entry = _free_entries;
460  while (entry != NULL) {
461    JvmtiTagHashmapEntry* next = entry->next();
462    delete entry;
463    entry = next;
464  }
465  _free_entries = NULL;
466}
467
468// create a hashmap entry
469// - if there's an entry on the (per-environment) free list then this
// is returned. Otherwise a new entry is allocated.
471JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) {
472  assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
473  JvmtiTagHashmapEntry* entry;
474  if (_free_entries == NULL) {
475    entry = new JvmtiTagHashmapEntry(ref, tag);
476  } else {
477    assert(_free_entries_count > 0, "mismatched _free_entries_count");
478    _free_entries_count--;
479    entry = _free_entries;
480    _free_entries = entry->next();
481    entry->init(ref, tag);
482  }
483  return entry;
484}
485
486// destroy an entry by returning it to the free list
487void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
488  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
489  // limit the size of the free list
490  if (_free_entries_count >= max_free_entries) {
491    delete entry;
492  } else {
493    entry->set_next(_free_entries);
494    _free_entries = entry;
495    _free_entries_count++;
496  }
497}
498
// returns the tag map for the given environment. If the tag map
500// doesn't exist then it is created.
501JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
502  JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map();
503  if (tag_map == NULL) {
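    // re-check under JvmtiThreadState_lock so that two threads racing to
    // create the tag map for the same environment do not both create one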
504    MutexLocker mu(JvmtiThreadState_lock);
505    tag_map = ((JvmtiEnvBase*)env)->tag_map();
506    if (tag_map == NULL) {
507      tag_map = new JvmtiTagMap(env);
508    }
509  } else {
510    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
511  }
512  return tag_map;
513}
514
515// iterate over all entries in the tag map.
516void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
517  hashmap()->entry_iterate(closure);
518}
519
520// returns true if the hashmaps are empty
521bool JvmtiTagMap::is_empty() {
522  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
523  return hashmap()->entry_count() == 0;
524}
525
526
527// Return the tag value for an object, or 0 if the object is
528// not tagged
529//
530static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
531  JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o);
532  if (entry == NULL) {
533    return 0;
534  } else {
535    return entry->tag();
536  }
537}
538
539
540// A CallbackWrapper is a support class for querying and tagging an object
541// around a callback to a profiler. The constructor does pre-callback
542// work to get the tag value, klass tag value, ... and the destructor
543// does the post-callback work of tagging or untagging the object.
544//
545// {
546//   CallbackWrapper wrapper(tag_map, o);
547//
548//   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
549//
550// } // wrapper goes out of scope here which results in the destructor
551//      checking to see if the object has been tagged, untagged, or the
552//      tag value has changed.
553//
554class CallbackWrapper : public StackObj {
555 private:
556  JvmtiTagMap* _tag_map;
557  JvmtiTagHashmap* _hashmap;
558  JvmtiTagHashmapEntry* _entry;
559  oop _o;
560  jlong _obj_size;
561  jlong _obj_tag;
562  jlong _klass_tag;
563
564 protected:
565  JvmtiTagMap* tag_map() const      { return _tag_map; }
566
567  // invoked post-callback to tag, untag, or update the tag of an object
568  void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
569                                       JvmtiTagHashmapEntry* entry, jlong obj_tag);
570 public:
571  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
572    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
573           "MT unsafe or must be VM thread");
574
575    // object to tag
576    _o = o;
577
578    // object size
579    _obj_size = (jlong)_o->size() * wordSize;
580
581    // record the context
582    _tag_map = tag_map;
583    _hashmap = tag_map->hashmap();
584    _entry = _hashmap->find(_o);
585
586    // get object tag
587    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();
588
589    // get the class and the class's tag value
590    assert(SystemDictionary::Class_klass()->is_mirror_instance_klass(), "Is not?");
591
592    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
593  }
594
595  ~CallbackWrapper() {
596    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
597  }
598
599  inline jlong* obj_tag_p()                     { return &_obj_tag; }
600  inline jlong obj_size() const                 { return _obj_size; }
601  inline jlong obj_tag() const                  { return _obj_tag; }
602  inline jlong klass_tag() const                { return _klass_tag; }
603};
604
605
606
// invoked post-callback to tag, untag, or update the tag of an object
608void inline CallbackWrapper::post_callback_tag_update(oop o,
609                                                      JvmtiTagHashmap* hashmap,
610                                                      JvmtiTagHashmapEntry* entry,
611                                                      jlong obj_tag) {
612  if (entry == NULL) {
613    if (obj_tag != 0) {
614      // callback has tagged the object
615      assert(Thread::current()->is_VM_thread(), "must be VMThread");
616      entry = tag_map()->create_entry(o, obj_tag);
617      hashmap->add(o, entry);
618    }
619  } else {
620    // object was previously tagged - the callback may have untagged
621    // the object or changed the tag value
622    if (obj_tag == 0) {
623
624      JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
625      assert(entry_removed == entry, "checking");
626      tag_map()->destroy_entry(entry);
627
628    } else {
629      if (obj_tag != entry->tag()) {
630         entry->set_tag(obj_tag);
631      }
632    }
633  }
634}
635
636// An extended CallbackWrapper used when reporting an object reference
637// to the agent.
638//
639// {
640//   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
641//
642//   (*callback)(wrapper.klass_tag(),
643//               wrapper.obj_size(),
644//               wrapper.obj_tag_p()
645//               wrapper.referrer_tag_p(), ...)
646//
647// } // wrapper goes out of scope here which results in the destructor
648//      checking to see if the referrer object has been tagged, untagged,
649//      or the tag value has changed.
650//
651class TwoOopCallbackWrapper : public CallbackWrapper {
652 private:
653  bool _is_reference_to_self;
654  JvmtiTagHashmap* _referrer_hashmap;
655  JvmtiTagHashmapEntry* _referrer_entry;
656  oop _referrer;
657  jlong _referrer_obj_tag;
658  jlong _referrer_klass_tag;
659  jlong* _referrer_tag_p;
660
661  bool is_reference_to_self() const             { return _is_reference_to_self; }
662
663 public:
664  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
665    CallbackWrapper(tag_map, o)
666  {
667    // self reference needs to be handled in a special way
668    _is_reference_to_self = (referrer == o);
669
670    if (_is_reference_to_self) {
671      _referrer_klass_tag = klass_tag();
672      _referrer_tag_p = obj_tag_p();
673    } else {
674      _referrer = referrer;
675      // record the context
676      _referrer_hashmap = tag_map->hashmap();
677      _referrer_entry = _referrer_hashmap->find(_referrer);
678
679      // get object tag
680      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
681      _referrer_tag_p = &_referrer_obj_tag;
682
683      // get referrer class tag.
684      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
685    }
686  }
687
688  ~TwoOopCallbackWrapper() {
689    if (!is_reference_to_self()){
690      post_callback_tag_update(_referrer,
691                               _referrer_hashmap,
692                               _referrer_entry,
693                               _referrer_obj_tag);
694    }
695  }
696
697  // address of referrer tag
698  // (for a self reference this will return the same thing as obj_tag_p())
699  inline jlong* referrer_tag_p()        { return _referrer_tag_p; }
700
701  // referrer's class tag
702  inline jlong referrer_klass_tag()     { return _referrer_klass_tag; }
703};
704
705// tag an object
706//
707// This function is performance critical. If many threads attempt to tag objects
708// around the same time then it's possible that the Mutex associated with the
709// tag map will be a hot lock.
710void JvmtiTagMap::set_tag(jobject object, jlong tag) {
711  MutexLocker ml(lock());
712
713  // resolve the object
714  oop o = JNIHandles::resolve_non_null(object);
715
716  // see if the object is already tagged
717  JvmtiTagHashmap* hashmap = _hashmap;
718  JvmtiTagHashmapEntry* entry = hashmap->find(o);
719
720  // if the object is not already tagged then we tag it
721  if (entry == NULL) {
722    if (tag != 0) {
723      entry = create_entry(o, tag);
724      hashmap->add(o, entry);
725    } else {
726      // no-op
727    }
728  } else {
729    // if the object is already tagged then we either update
730    // the tag (if a new tag value has been provided)
731    // or remove the object if the new tag value is 0.
732    if (tag == 0) {
733      hashmap->remove(o);
734      destroy_entry(entry);
735    } else {
736      entry->set_tag(tag);
737    }
738  }
739}
740
741// get the tag for an object
742jlong JvmtiTagMap::get_tag(jobject object) {
743  MutexLocker ml(lock());
744
745  // resolve the object
746  oop o = JNIHandles::resolve_non_null(object);
747
748  return tag_for(this, o);
749}
750
751
752// Helper class used to describe the static or instance fields of a class.
753// For each field it holds the field index (as defined by the JVMTI specification),
754// the field type, and the offset.
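// The field type is the first character of the field's signature - for
// example 'I' for int, 'J' for long, 'L' for object references and '['
// for arrays (see ClassFieldMap::create_map_of_static_fields below).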
755
756class ClassFieldDescriptor: public CHeapObj<mtInternal> {
757 private:
758  int _field_index;
759  int _field_offset;
760  char _field_type;
761 public:
762  ClassFieldDescriptor(int index, char type, int offset) :
763    _field_index(index), _field_type(type), _field_offset(offset) {
764  }
765  int field_index()  const  { return _field_index; }
766  char field_type()  const  { return _field_type; }
767  int field_offset() const  { return _field_offset; }
768};
769
770class ClassFieldMap: public CHeapObj<mtInternal> {
771 private:
772  enum {
773    initial_field_count = 5
774  };
775
776  // list of field descriptors
777  GrowableArray<ClassFieldDescriptor*>* _fields;
778
779  // constructor
780  ClassFieldMap();
781
782  // add a field
783  void add(int index, char type, int offset);
784
785  // returns the field count for the given class
786  static int compute_field_count(instanceKlassHandle ikh);
787
788 public:
789  ~ClassFieldMap();
790
791  // access
792  int field_count()                     { return _fields->length(); }
793  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }
794
795  // functions to create maps of static or instance fields
796  static ClassFieldMap* create_map_of_static_fields(Klass* k);
797  static ClassFieldMap* create_map_of_instance_fields(oop obj);
798};
799
800ClassFieldMap::ClassFieldMap() {
801  _fields = new (ResourceObj::C_HEAP, mtInternal)
802    GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
803}
804
805ClassFieldMap::~ClassFieldMap() {
806  for (int i=0; i<_fields->length(); i++) {
807    delete _fields->at(i);
808  }
809  delete _fields;
810}
811
812void ClassFieldMap::add(int index, char type, int offset) {
813  ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
814  _fields->append(field);
815}
816
817// Returns a heap allocated ClassFieldMap to describe the static fields
818// of the given class.
819//
820ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
821  HandleMark hm;
822  instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), k);
823
824  // create the field map
825  ClassFieldMap* field_map = new ClassFieldMap();
826
827  FilteredFieldStream f(ikh, false, false);
828  int max_field_index = f.field_count()-1;
829
830  int index = 0;
831  for (FilteredFieldStream fld(ikh, true, true); !fld.eos(); fld.next(), index++) {
832    // ignore instance fields
833    if (!fld.access_flags().is_static()) {
834      continue;
835    }
836    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
837  }
838  return field_map;
839}
840
841// Returns a heap allocated ClassFieldMap to describe the instance fields
842// of the given class. All instance fields are included (this means public
843// and private fields declared in superclasses and superinterfaces too).
844//
845ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
846  HandleMark hm;
847  instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), obj->klass());
848
849  // create the field map
850  ClassFieldMap* field_map = new ClassFieldMap();
851
852  FilteredFieldStream f(ikh, false, false);
853
854  int max_field_index = f.field_count()-1;
855
856  int index = 0;
857  for (FilteredFieldStream fld(ikh, false, false); !fld.eos(); fld.next(), index++) {
858    // ignore static fields
859    if (fld.access_flags().is_static()) {
860      continue;
861    }
862    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
863  }
864
865  return field_map;
866}
867
// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
870// heap iteration and avoid creating a field map for each object in the heap
871// (only need to create the map when the first instance of a class is encountered).
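// The cache is only populated inside a ClassFieldMapCacheMark scope and is
// cleared again when that scope ends (see ClassFieldMapCacheMark below).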
872//
873class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
874 private:
875   enum {
876     initial_class_count = 200
877   };
878  ClassFieldMap* _field_map;
879
880  ClassFieldMap* field_map() const          { return _field_map; }
881
882  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
883  ~JvmtiCachedClassFieldMap();
884
885  static GrowableArray<InstanceKlass*>* _class_list;
886  static void add_to_class_list(InstanceKlass* ik);
887
888 public:
  // returns the field map for a given object (returning the map cached
  // by the InstanceKlass if possible)
891  static ClassFieldMap* get_map_of_instance_fields(oop obj);
892
  // removes the field map from all InstanceKlasses - should be
  // called before the VM operation completes
895  static void clear_cache();
896
  // returns the number of ClassFieldMaps cached by InstanceKlasses
898  static int cached_field_map_count();
899};
900
901GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
902
903JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
904  _field_map = field_map;
905}
906
907JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
908  if (_field_map != NULL) {
909    delete _field_map;
910  }
911}
912
// Marker class to ensure that the class field map cache is only used in a defined
914// scope.
915class ClassFieldMapCacheMark : public StackObj {
916 private:
917   static bool _is_active;
918 public:
919   ClassFieldMapCacheMark() {
920     assert(Thread::current()->is_VM_thread(), "must be VMThread");
921     assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
922     assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
923     _is_active = true;
924   }
925   ~ClassFieldMapCacheMark() {
926     JvmtiCachedClassFieldMap::clear_cache();
927     _is_active = false;
928   }
929   static bool is_active() { return _is_active; }
930};
931
932bool ClassFieldMapCacheMark::_is_active;
933
934
935// record that the given InstanceKlass is caching a field map
936void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
937  if (_class_list == NULL) {
938    _class_list = new (ResourceObj::C_HEAP, mtInternal)
939      GrowableArray<InstanceKlass*>(initial_class_count, true);
940  }
941  _class_list->push(ik);
942}
943
944// returns the instance field map for the given object
945// (returns field map cached by the InstanceKlass if possible)
946ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
947  assert(Thread::current()->is_VM_thread(), "must be VMThread");
948  assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
949
950  Klass* k = obj->klass();
951  InstanceKlass* ik = InstanceKlass::cast(k);
952
953  // return cached map if possible
954  JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
955  if (cached_map != NULL) {
956    assert(cached_map->field_map() != NULL, "missing field list");
957    return cached_map->field_map();
958  } else {
959    ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
960    cached_map = new JvmtiCachedClassFieldMap(field_map);
961    ik->set_jvmti_cached_class_field_map(cached_map);
962    add_to_class_list(ik);
963    return field_map;
964  }
965}
966
// remove the field maps cached by all InstanceKlasses
968void JvmtiCachedClassFieldMap::clear_cache() {
969  assert(Thread::current()->is_VM_thread(), "must be VMThread");
970  if (_class_list != NULL) {
971    for (int i = 0; i < _class_list->length(); i++) {
972      InstanceKlass* ik = _class_list->at(i);
973      JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
974      assert(cached_map != NULL, "should not be NULL");
975      ik->set_jvmti_cached_class_field_map(NULL);
976      delete cached_map;  // deletes the encapsulated field map
977    }
978    delete _class_list;
979    _class_list = NULL;
980  }
981}
982
// returns the number of ClassFieldMaps cached by InstanceKlasses
984int JvmtiCachedClassFieldMap::cached_field_map_count() {
985  return (_class_list == NULL) ? 0 : _class_list->length();
986}
987
988// helper function to indicate if an object is filtered by its tag or class tag
989static inline bool is_filtered_by_heap_filter(jlong obj_tag,
990                                              jlong klass_tag,
991                                              int heap_filter) {
992  // apply the heap filter
993  if (obj_tag != 0) {
994    // filter out tagged objects
995    if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
996  } else {
997    // filter out untagged objects
998    if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
999  }
1000  if (klass_tag != 0) {
1001    // filter out objects with tagged classes
1002    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
1003  } else {
1004    // filter out objects with untagged classes.
1005    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
1006  }
1007  return false;
1008}
1009
1010// helper function to indicate if an object is filtered by a klass filter
1011static inline bool is_filtered_by_klass_filter(oop obj, KlassHandle klass_filter) {
1012  if (!klass_filter.is_null()) {
1013    if (obj->klass() != klass_filter()) {
1014      return true;
1015    }
1016  }
1017  return false;
1018}
1019
1020// helper function to tell if a field is a primitive field or not
1021static inline bool is_primitive_field_type(char type) {
1022  return (type != 'L' && type != '[');
1023}
1024
1025// helper function to copy the value from location addr to jvalue.
1026static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
1027  switch (value_type) {
1028    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
1029    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
1030    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
1031    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
1032    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
1033    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
1034    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
1035    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
1036    default: ShouldNotReachHere();
1037  }
1038}
1039
1040// helper function to invoke string primitive value callback
1041// returns visit control flags
1042static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
1043                                         CallbackWrapper* wrapper,
1044                                         oop str,
1045                                         void* user_data)
1046{
1047  assert(str->klass() == SystemDictionary::String_klass(), "not a string");
1048
1049  typeArrayOop s_value = java_lang_String::value(str);
1050
1051  // JDK-6584008: the value field may be null if a String instance is
1052  // partially constructed.
1053  if (s_value == NULL) {
1054    return 0;
1055  }
1056  // get the string value and length
1057  // (string value may be offset from the base)
1058  int s_len = java_lang_String::length(str);
1059  bool is_latin1 = java_lang_String::is_latin1(str);
1060  jchar* value;
1061  if (s_len > 0) {
1062    if (!is_latin1) {
1063      value = s_value->char_at_addr(0);
1064    } else {
1065      // Inflate latin1 encoded string to UTF16
1066      jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
1067      for (int i = 0; i < s_len; i++) {
1068        buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
1069      }
1070      value = &buf[0];
1071    }
1072  } else {
1073    // Don't use char_at_addr(0) if length is 0
1074    value = (jchar*) s_value->base(T_CHAR);
1075  }
1076
1077  // invoke the callback
1078  jint res = (*cb)(wrapper->klass_tag(),
1079                   wrapper->obj_size(),
1080                   wrapper->obj_tag_p(),
1081                   value,
1082                   (jint)s_len,
1083                   user_data);
1084
1085  if (is_latin1 && s_len > 0) {
1086    FREE_C_HEAP_ARRAY(jchar, value);
1087  }
1088  return res;
1089}
1090
// helper function to invoke array primitive value callback
1092// returns visit control flags
1093static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
1094                                                  CallbackWrapper* wrapper,
1095                                                  oop obj,
1096                                                  void* user_data)
1097{
1098  assert(obj->is_typeArray(), "not a primitive array");
1099
1100  // get base address of first element
1101  typeArrayOop array = typeArrayOop(obj);
1102  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
1103  void* elements = array->base(type);
1104
1105  // jvmtiPrimitiveType is defined so this mapping is always correct
1106  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);
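  // (e.g. type2char(T_INT) == 'I' == JVMTI_PRIMITIVE_TYPE_INT)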
1107
1108  return (*cb)(wrapper->klass_tag(),
1109               wrapper->obj_size(),
1110               wrapper->obj_tag_p(),
1111               (jint)array->length(),
1112               elem_type,
1113               elements,
1114               user_data);
1115}
1116
1117// helper function to invoke the primitive field callback for all static fields
1118// of a given class
1119static jint invoke_primitive_field_callback_for_static_fields
1120  (CallbackWrapper* wrapper,
1121   oop obj,
1122   jvmtiPrimitiveFieldCallback cb,
1123   void* user_data)
1124{
1125  // for static fields only the index will be set
1126  static jvmtiHeapReferenceInfo reference_info = { 0 };
1127
1128  assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
1129  if (java_lang_Class::is_primitive(obj)) {
1130    return 0;
1131  }
1132  Klass* klass = java_lang_Class::as_Klass(obj);
1133
1134  // ignore classes for object and type arrays
1135  if (!klass->is_instance_klass()) {
1136    return 0;
1137  }
1138
1139  // ignore classes which aren't linked yet
1140  InstanceKlass* ik = InstanceKlass::cast(klass);
1141  if (!ik->is_linked()) {
1142    return 0;
1143  }
1144
1145  // get the field map
1146  ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
1147
1148  // invoke the callback for each static primitive field
1149  for (int i=0; i<field_map->field_count(); i++) {
1150    ClassFieldDescriptor* field = field_map->field_at(i);
1151
1152    // ignore non-primitive fields
1153    char type = field->field_type();
1154    if (!is_primitive_field_type(type)) {
1155      continue;
1156    }
1157    // one-to-one mapping
1158    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1159
1160    // get offset and field value
1161    int offset = field->field_offset();
1162    address addr = (address)klass->java_mirror() + offset;
1163    jvalue value;
1164    copy_to_jvalue(&value, addr, value_type);
1165
1166    // field index
1167    reference_info.field.index = field->field_index();
1168
1169    // invoke the callback
1170    jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
1171                     &reference_info,
1172                     wrapper->klass_tag(),
1173                     wrapper->obj_tag_p(),
1174                     value,
1175                     value_type,
1176                     user_data);
1177    if (res & JVMTI_VISIT_ABORT) {
1178      delete field_map;
1179      return res;
1180    }
1181  }
1182
1183  delete field_map;
1184  return 0;
1185}
1186
1187// helper function to invoke the primitive field callback for all instance fields
1188// of a given object
1189static jint invoke_primitive_field_callback_for_instance_fields(
1190  CallbackWrapper* wrapper,
1191  oop obj,
1192  jvmtiPrimitiveFieldCallback cb,
1193  void* user_data)
1194{
1195  // for instance fields only the index will be set
1196  static jvmtiHeapReferenceInfo reference_info = { 0 };
1197
1198  // get the map of the instance fields
1199  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
1200
1201  // invoke the callback for each instance primitive field
1202  for (int i=0; i<fields->field_count(); i++) {
1203    ClassFieldDescriptor* field = fields->field_at(i);
1204
1205    // ignore non-primitive fields
1206    char type = field->field_type();
1207    if (!is_primitive_field_type(type)) {
1208      continue;
1209    }
1210    // one-to-one mapping
1211    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1212
1213    // get offset and field value
1214    int offset = field->field_offset();
1215    address addr = (address)obj + offset;
1216    jvalue value;
1217    copy_to_jvalue(&value, addr, value_type);
1218
1219    // field index
1220    reference_info.field.index = field->field_index();
1221
1222    // invoke the callback
1223    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
1224                     &reference_info,
1225                     wrapper->klass_tag(),
1226                     wrapper->obj_tag_p(),
1227                     value,
1228                     value_type,
1229                     user_data);
1230    if (res & JVMTI_VISIT_ABORT) {
1231      return res;
1232    }
1233  }
1234  return 0;
1235}
1236
1237
1238// VM operation to iterate over all objects in the heap (both reachable
1239// and unreachable)
1240class VM_HeapIterateOperation: public VM_Operation {
1241 private:
1242  ObjectClosure* _blk;
1243 public:
1244  VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }
1245
1246  VMOp_Type type() const { return VMOp_HeapIterateOperation; }
1247  void doit() {
    // allows class field maps to be cached during iteration
1249    ClassFieldMapCacheMark cm;
1250
1251    // make sure that heap is parsable (fills TLABs with filler objects)
1252    Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
1253
1254    // Verify heap before iteration - if the heap gets corrupted then
1255    // JVMTI's IterateOverHeap will crash.
1256    if (VerifyBeforeIteration) {
1257      Universe::verify();
1258    }
1259
1260    // do the iteration
1261    // If this operation encounters a bad object when using CMS,
1262    // consider using safe_object_iterate() which avoids perm gen
1263    // objects that may contain bad references.
1264    Universe::heap()->object_iterate(_blk);
1265  }
1266
1267};
1268
1269
1270// An ObjectClosure used to support the deprecated IterateOverHeap and
1271// IterateOverInstancesOfClass functions
1272class IterateOverHeapObjectClosure: public ObjectClosure {
1273 private:
1274  JvmtiTagMap* _tag_map;
1275  KlassHandle _klass;
1276  jvmtiHeapObjectFilter _object_filter;
1277  jvmtiHeapObjectCallback _heap_object_callback;
1278  const void* _user_data;
1279
1280  // accessors
1281  JvmtiTagMap* tag_map() const                    { return _tag_map; }
1282  jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
1283  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
1284  KlassHandle klass() const                       { return _klass; }
1285  const void* user_data() const                   { return _user_data; }
1286
1287  // indicates if iteration has been aborted
1288  bool _iteration_aborted;
1289  bool is_iteration_aborted() const               { return _iteration_aborted; }
1290  void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }
1291
1292 public:
1293  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
1294                               KlassHandle klass,
1295                               jvmtiHeapObjectFilter object_filter,
1296                               jvmtiHeapObjectCallback heap_object_callback,
1297                               const void* user_data) :
1298    _tag_map(tag_map),
1299    _klass(klass),
1300    _object_filter(object_filter),
1301    _heap_object_callback(heap_object_callback),
1302    _user_data(user_data),
1303    _iteration_aborted(false)
1304  {
1305  }
1306
1307  void do_object(oop o);
1308};
1309
1310// invoked for each object in the heap
1311void IterateOverHeapObjectClosure::do_object(oop o) {
1312  // check if iteration has been halted
1313  if (is_iteration_aborted()) return;
1314
1315  // ignore any objects that aren't visible to profiler
1316  if (!ServiceUtil::visible_oop(o)) return;
1317
1318  // instanceof check when filtering by klass
1319  if (!klass().is_null() && !o->is_a(klass()())) {
1320    return;
1321  }
  // prepare for the callback
1323  CallbackWrapper wrapper(tag_map(), o);
1324
1325  // if the object is tagged and we're only interested in untagged objects
  // then don't invoke the callback. Similarly, if the object is untagged
1327  // and we're only interested in tagged objects we skip the callback.
1328  if (wrapper.obj_tag() != 0) {
1329    if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
1330  } else {
1331    if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
1332  }
1333
1334  // invoke the agent's callback
1335  jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
1336                                                       wrapper.obj_size(),
1337                                                       wrapper.obj_tag_p(),
1338                                                       (void*)user_data());
1339  if (control == JVMTI_ITERATION_ABORT) {
1340    set_iteration_aborted(true);
1341  }
1342}
1343
1344// An ObjectClosure used to support the IterateThroughHeap function
1345class IterateThroughHeapObjectClosure: public ObjectClosure {
1346 private:
1347  JvmtiTagMap* _tag_map;
1348  KlassHandle _klass;
1349  int _heap_filter;
1350  const jvmtiHeapCallbacks* _callbacks;
1351  const void* _user_data;
1352
1353  // accessor functions
1354  JvmtiTagMap* tag_map() const                     { return _tag_map; }
1355  int heap_filter() const                          { return _heap_filter; }
1356  const jvmtiHeapCallbacks* callbacks() const      { return _callbacks; }
1357  KlassHandle klass() const                        { return _klass; }
1358  const void* user_data() const                    { return _user_data; }
1359
1360  // indicates if the iteration has been aborted
1361  bool _iteration_aborted;
1362  bool is_iteration_aborted() const                { return _iteration_aborted; }
1363
1364  // used to check the visit control flags. If the abort flag is set
1365  // then we set the iteration aborted flag so that the iteration completes
1366  // without processing any further objects
1367  bool check_flags_for_abort(jint flags) {
1368    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
1369    if (is_abort) {
1370      _iteration_aborted = true;
1371    }
1372    return is_abort;
1373  }
1374
1375 public:
1376  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
1377                                  KlassHandle klass,
1378                                  int heap_filter,
1379                                  const jvmtiHeapCallbacks* heap_callbacks,
1380                                  const void* user_data) :
1381    _tag_map(tag_map),
1382    _klass(klass),
1383    _heap_filter(heap_filter),
1384    _callbacks(heap_callbacks),
1385    _user_data(user_data),
1386    _iteration_aborted(false)
1387  {
1388  }
1389
1390  void do_object(oop o);
1391};
1392
1393// invoked for each object in the heap
1394void IterateThroughHeapObjectClosure::do_object(oop obj) {
1395  // check if iteration has been halted
1396  if (is_iteration_aborted()) return;
1397
1398  // ignore any objects that aren't visible to profiler
1399  if (!ServiceUtil::visible_oop(obj)) return;
1400
1401  // apply class filter
1402  if (is_filtered_by_klass_filter(obj, klass())) return;
1403
1404  // prepare for callback
1405  CallbackWrapper wrapper(tag_map(), obj);
1406
1407  // check if filtered by the heap filter
1408  if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
1409    return;
1410  }
1411
1412  // for arrays we need the length, otherwise -1
1413  bool is_array = obj->is_array();
1414  int len = is_array ? arrayOop(obj)->length() : -1;
1415
1416  // invoke the object callback (if callback is provided)
1417  if (callbacks()->heap_iteration_callback != NULL) {
1418    jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
1419    jint res = (*cb)(wrapper.klass_tag(),
1420                     wrapper.obj_size(),
1421                     wrapper.obj_tag_p(),
1422                     (jint)len,
1423                     (void*)user_data());
1424    if (check_flags_for_abort(res)) return;
1425  }
1426
1427  // for objects and classes we report primitive fields if callback provided
1428  if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
1429    jint res;
1430    jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
1431    if (obj->klass() == SystemDictionary::Class_klass()) {
1432      res = invoke_primitive_field_callback_for_static_fields(&wrapper,
1433                                                                    obj,
1434                                                                    cb,
1435                                                                    (void*)user_data());
1436    } else {
1437      res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
1438                                                                      obj,
1439                                                                      cb,
1440                                                                      (void*)user_data());
1441    }
1442    if (check_flags_for_abort(res)) return;
1443  }
1444
1445  // string callback
1446  if (!is_array &&
1447      callbacks()->string_primitive_value_callback != NULL &&
1448      obj->klass() == SystemDictionary::String_klass()) {
1449    jint res = invoke_string_value_callback(
1450                callbacks()->string_primitive_value_callback,
1451                &wrapper,
1452                obj,
1453                (void*)user_data() );
1454    if (check_flags_for_abort(res)) return;
1455  }
1456
1457  // array callback
1458  if (is_array &&
1459      callbacks()->array_primitive_value_callback != NULL &&
1460      obj->is_typeArray()) {
1461    jint res = invoke_array_primitive_value_callback(
1462               callbacks()->array_primitive_value_callback,
1463               &wrapper,
1464               obj,
1465               (void*)user_data() );
1466    if (check_flags_for_abort(res)) return;
1467  }
}
1469
1470
1471// Deprecated function to iterate over all objects in the heap
1472void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
1473                                    KlassHandle klass,
1474                                    jvmtiHeapObjectCallback heap_object_callback,
1475                                    const void* user_data)
1476{
1477  MutexLocker ml(Heap_lock);
1478  IterateOverHeapObjectClosure blk(this,
1479                                   klass,
1480                                   object_filter,
1481                                   heap_object_callback,
1482                                   user_data);
1483  VM_HeapIterateOperation op(&blk);
1484  VMThread::execute(&op);
1485}
1486
1487
1488// Iterates over all objects in the heap
1489void JvmtiTagMap::iterate_through_heap(jint heap_filter,
1490                                       KlassHandle klass,
1491                                       const jvmtiHeapCallbacks* callbacks,
1492                                       const void* user_data)
1493{
1494  MutexLocker ml(Heap_lock);
1495  IterateThroughHeapObjectClosure blk(this,
1496                                      klass,
1497                                      heap_filter,
1498                                      callbacks,
1499                                      user_data);
1500  VM_HeapIterateOperation op(&blk);
1501  VMThread::execute(&op);
1502}
1503
1504// support class for get_objects_with_tags
1505
1506class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
1507 private:
1508  JvmtiEnv* _env;
1509  jlong* _tags;
1510  jint _tag_count;
1511
1512  GrowableArray<jobject>* _object_results;  // collected objects (JNI weak refs)
1513  GrowableArray<uint64_t>* _tag_results;    // collected tags
1514
1515 public:
1516  TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
1517    _env = env;
1518    _tags = (jlong*)tags;
1519    _tag_count = tag_count;
1520    _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true);
1521    _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true);
1522  }
1523
1524  ~TagObjectCollector() {
1525    delete _object_results;
1526    delete _tag_results;
1527  }
1528
1529  // for each tagged object check if the tag value matches
1530  // - if it matches then we create a JNI local reference to the object
1531  // and record the reference and tag value.
1532  //
1533  void do_entry(JvmtiTagHashmapEntry* entry) {
1534    for (int i=0; i<_tag_count; i++) {
1535      if (_tags[i] == entry->tag()) {
1536        oop o = entry->object();
1537        assert(o != NULL && Universe::heap()->is_in_reserved(o), "sanity check");
1538#if INCLUDE_ALL_GCS
1539        if (UseG1GC) {
1540          // The reference in this tag map could be the only (implicitly weak)
1541          // reference to that object. If we hand it out, we need to keep it live wrt
1542          // SATB marking similar to other j.l.ref.Reference referents.
1543          G1SATBCardTableModRefBS::enqueue(o);
1544        }
1545#endif
1546        jobject ref = JNIHandles::make_local(JavaThread::current(), o);
1547        _object_results->append(ref);
1548        _tag_results->append((uint64_t)entry->tag());
1549      }
1550    }
1551  }
1552
1553  // return the results from the collection
1554  //
1555  jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1556    jvmtiError error;
1557    int count = _object_results->length();
1558    assert(count >= 0, "sanity check");
1559
1560    // if object_result_ptr is not NULL then allocate the result and copy
1561    // in the object references.
1562    if (object_result_ptr != NULL) {
1563      error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
1564      if (error != JVMTI_ERROR_NONE) {
1565        return error;
1566      }
1567      for (int i=0; i<count; i++) {
1568        (*object_result_ptr)[i] = _object_results->at(i);
1569      }
1570    }
1571
1572    // if tag_result_ptr is not NULL then allocate the result and copy
1573    // in the tag values.
1574    if (tag_result_ptr != NULL) {
1575      error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
1576      if (error != JVMTI_ERROR_NONE) {
1577        if (object_result_ptr != NULL) {
          _env->Deallocate((unsigned char*)*object_result_ptr);
1579        }
1580        return error;
1581      }
1582      for (int i=0; i<count; i++) {
1583        (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
1584      }
1585    }
1586
1587    *count_ptr = count;
1588    return JVMTI_ERROR_NONE;
1589  }
1590};
1591
1592// return the list of objects with the specified tags
1593jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
1594  jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1595
1596  TagObjectCollector collector(env(), tags, count);
1597  {
1598    // iterate over all tagged objects
1599    MutexLocker ml(lock());
1600    entry_iterate(&collector);
1601  }
1602  return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
1603}
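
// For reference, a minimal agent-side sketch of the JVMTI call that reaches the
// code above (not part of HotSpot; the tag value 42 and the error handling are
// purely illustrative):
//
//   jlong tag = 42;                 // a tag previously installed with SetTag
//   jint count;
//   jobject* objects;
//   jlong* tags_out;
//   jvmtiError err = jvmti->GetObjectsWithTags(1, &tag, &count,
//                                              &objects, &tags_out);
//   if (err == JVMTI_ERROR_NONE) {
//     // use objects[0..count-1] / tags_out[0..count-1], then release both
//     jvmti->Deallocate((unsigned char*)objects);
//     jvmti->Deallocate((unsigned char*)tags_out);
//   }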
1604
1605
1606// ObjectMarker is used to support the marking objects when walking the
1607// heap.
1608//
1609// This implementation uses the existing mark bits in an object for
1610// marking. Objects that are marked must later have their headers restored.
1611// As most objects are unlocked and don't have their identity hash computed,
1612// we don't have to save their headers. Instead we save only the headers that
1613// are "interesting". Later, when the headers are restored, this implementation
1614// restores all headers to their initial value and then restores the few
1615// objects that had interesting headers.
1616//
1617// Future work: This implementation currently uses growable arrays to save
1618// the oop and header of interesting objects. As an optimization we could
1619// use the same technique as the GC and make use of the unused area
1620// between top() and end().
1621//
1622
1623// An ObjectClosure used to restore the mark bits of an object
1624class RestoreMarksClosure : public ObjectClosure {
1625 public:
1626  void do_object(oop o) {
1627    if (o != NULL) {
1628      markOop mark = o->mark();
1629      if (mark->is_marked()) {
1630        o->init_mark();
1631      }
1632    }
1633  }
1634};
1635
1636// ObjectMarker provides the mark and visited functions
1637class ObjectMarker : AllStatic {
1638 private:
1639  // saved headers
1640  static GrowableArray<oop>* _saved_oop_stack;
1641  static GrowableArray<markOop>* _saved_mark_stack;
1642  static bool _needs_reset;                  // do we need to reset mark bits?
1643
1644 public:
1645  static void init();                       // initialize
1646  static void done();                       // clean-up
1647
1648  static inline void mark(oop o);           // mark an object
1649  static inline bool visited(oop o);        // check if object has been visited
1650
1651  static inline bool needs_reset()            { return _needs_reset; }
1652  static inline void set_needs_reset(bool v)  { _needs_reset = v; }
1653};
1654
1655GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
1656GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
1657bool ObjectMarker::_needs_reset = true;  // need to reset mark bits by default
1658
1659// initialize ObjectMarker - prepares for object marking
1660void ObjectMarker::init() {
1661  assert(Thread::current()->is_VM_thread(), "must be VMThread");
1662
1663  // prepare heap for iteration
1664  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
1665
1666  // create stacks for interesting headers
1667  _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markOop>(4000, true);
1668  _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);
1669
1670  if (UseBiasedLocking) {
1671    BiasedLocking::preserve_marks();
1672  }
1673}
1674
1675// Object marking is done so restore object headers
1676void ObjectMarker::done() {
1677  // iterate over all objects and restore the mark bits to
1678  // their initial value
1679  RestoreMarksClosure blk;
1680  if (needs_reset()) {
1681    Universe::heap()->object_iterate(&blk);
1682  } else {
1683    // We don't need to reset mark bits on this call, but reset the
1684    // flag to the default for the next call.
1685    set_needs_reset(true);
1686  }
1687
1688  // now restore the interesting headers
1689  for (int i = 0; i < _saved_oop_stack->length(); i++) {
1690    oop o = _saved_oop_stack->at(i);
1691    markOop mark = _saved_mark_stack->at(i);
1692    o->set_mark(mark);
1693  }
1694
1695  if (UseBiasedLocking) {
1696    BiasedLocking::restore_marks();
1697  }
1698
1699  // free the stacks
1700  delete _saved_oop_stack;
1701  delete _saved_mark_stack;
1702}
1703
1704// mark an object
1705inline void ObjectMarker::mark(oop o) {
1706  assert(Universe::heap()->is_in(o), "sanity check");
1707  assert(!o->mark()->is_marked(), "should only mark an object once");
1708
1709  // object's mark word
1710  markOop mark = o->mark();
1711
1712  if (mark->must_be_preserved(o)) {
1713    _saved_mark_stack->push(mark);
1714    _saved_oop_stack->push(o);
1715  }
1716
1717  // mark the object
1718  o->set_mark(markOopDesc::prototype()->set_marked());
1719}
1720
1721// return true if object is marked
1722inline bool ObjectMarker::visited(oop o) {
1723  return o->mark()->is_marked();
1724}
1725
1726// Stack allocated class to help ensure that ObjectMarker is used
1727// correctly. Constructor initializes ObjectMarker, destructor calls
1728// ObjectMarker's done() function to restore object headers.
1729class ObjectMarkerController : public StackObj {
1730 public:
1731  ObjectMarkerController() {
1732    ObjectMarker::init();
1733  }
1734  ~ObjectMarkerController() {
1735    ObjectMarker::done();
1736  }
1737};
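
// Typical usage (a sketch of the intended pattern; the heap walk VM operation
// later in this file is expected to use it this way): allocate the controller
// on the stack for the duration of the walk so that object headers are always
// restored, even on an early return.
//
//   {
//     ObjectMarkerController marker;          // ObjectMarker::init()
//     ... ObjectMarker::mark(o) / ObjectMarker::visited(o) ...
//   }                                         // ObjectMarker::done()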
1738
1739
1740// helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
1741// (not performance critical as only used for roots)
1742static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
1743  switch (kind) {
1744    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
1745    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
1746    case JVMTI_HEAP_REFERENCE_MONITOR:      return JVMTI_HEAP_ROOT_MONITOR;
1747    case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
1748    case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
1749    case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
1750    case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
1751    default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
1752  }
1753}
1754
1755// Base class for all heap walk contexts. The base class maintains a flag
1756// to indicate if the context is valid or not.
1757class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
1758 private:
1759  bool _valid;
1760 public:
1761  HeapWalkContext(bool valid)                   { _valid = valid; }
1762  void invalidate()                             { _valid = false; }
1763  bool is_valid() const                         { return _valid; }
1764};
1765
1766// A basic heap walk context for the deprecated heap walking functions.
1767// The context for a basic heap walk are the callbacks and fields used by
1768// the referrer caching scheme.
1769class BasicHeapWalkContext: public HeapWalkContext {
1770 private:
1771  jvmtiHeapRootCallback _heap_root_callback;
1772  jvmtiStackReferenceCallback _stack_ref_callback;
1773  jvmtiObjectReferenceCallback _object_ref_callback;
1774
1775  // used for caching
1776  oop _last_referrer;
1777  jlong _last_referrer_tag;
1778
1779 public:
1780  BasicHeapWalkContext() : HeapWalkContext(false) { }
1781
1782  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
1783                       jvmtiStackReferenceCallback stack_ref_callback,
1784                       jvmtiObjectReferenceCallback object_ref_callback) :
1785    HeapWalkContext(true),
1786    _heap_root_callback(heap_root_callback),
1787    _stack_ref_callback(stack_ref_callback),
1788    _object_ref_callback(object_ref_callback),
1789    _last_referrer(NULL),
1790    _last_referrer_tag(0) {
1791  }
1792
1793  // accessors
1794  jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
1795  jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
1796  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback;  }
1797
1798  oop last_referrer() const               { return _last_referrer; }
1799  void set_last_referrer(oop referrer)    { _last_referrer = referrer; }
1800  jlong last_referrer_tag() const         { return _last_referrer_tag; }
1801  void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
1802};
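
// For context, a minimal agent-side sketch of the deprecated API served by
// this context (the callback name my_obj_ref is hypothetical; unused
// callbacks are passed as NULL):
//
//   jvmtiIterationControl JNICALL my_obj_ref(jvmtiObjectReferenceKind kind,
//       jlong class_tag, jlong size, jlong* tag_ptr,
//       jlong referrer_tag, jint referrer_index, void* user_data) {
//     return JVMTI_ITERATION_CONTINUE;
//   }
//
//   jvmti->IterateOverReachableObjects(NULL /* heap_root_callback */,
//                                      NULL /* stack_ref_callback */,
//                                      &my_obj_ref,
//                                      NULL /* user_data */);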
1803
1804// The advanced heap walk context for the FollowReferences functions.
1805// The context is the callbacks, and the fields used for filtering.
1806class AdvancedHeapWalkContext: public HeapWalkContext {
1807 private:
1808  jint _heap_filter;
1809  KlassHandle _klass_filter;
1810  const jvmtiHeapCallbacks* _heap_callbacks;
1811
1812 public:
1813  AdvancedHeapWalkContext() : HeapWalkContext(false) { }
1814
1815  AdvancedHeapWalkContext(jint heap_filter,
1816                           KlassHandle klass_filter,
1817                           const jvmtiHeapCallbacks* heap_callbacks) :
1818    HeapWalkContext(true),
1819    _heap_filter(heap_filter),
1820    _klass_filter(klass_filter),
1821    _heap_callbacks(heap_callbacks) {
1822  }
1823
1824  // accessors
1825  jint heap_filter() const         { return _heap_filter; }
1826  KlassHandle klass_filter() const { return _klass_filter; }
1827
1828  const jvmtiHeapReferenceCallback heap_reference_callback() const {
1829    return _heap_callbacks->heap_reference_callback;
1830  };
1831  const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
1832    return _heap_callbacks->primitive_field_callback;
1833  }
1834  const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
1835    return _heap_callbacks->array_primitive_value_callback;
1836  }
1837  const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
1838    return _heap_callbacks->string_primitive_value_callback;
1839  }
1840};
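
// For context, a minimal agent-side sketch of the FollowReferences call that
// this context wraps (the callback name my_heap_ref is hypothetical; unused
// callbacks are left NULL so only heap references are reported):
//
//   jvmtiHeapCallbacks callbacks = {};
//   callbacks.heap_reference_callback = &my_heap_ref;
//   jvmti->FollowReferences(0    /* heap_filter: no filtering */,
//                           NULL /* klass: all classes */,
//                           NULL /* initial_object: start from the roots */,
//                           &callbacks,
//                           NULL /* user_data */);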
1841
1842// The CallbackInvoker is a class with static functions that the heap walk can call
1843// into to invoke callbacks. It works in one of two modes. The "basic" mode is
1844// used for the deprecated IterateOverReachableObjects functions. The "advanced"
1845// mode is for the newer FollowReferences function which supports a lot of
1846// additional callbacks.
1847class CallbackInvoker : AllStatic {
1848 private:
1849  // heap walk styles
1850  enum { basic, advanced };
1851  static int _heap_walk_type;
1852  static bool is_basic_heap_walk()           { return _heap_walk_type == basic; }
1853  static bool is_advanced_heap_walk()        { return _heap_walk_type == advanced; }
1854
1855  // context for basic style heap walk
1856  static BasicHeapWalkContext _basic_context;
1857  static BasicHeapWalkContext* basic_context() {
1858    assert(_basic_context.is_valid(), "invalid");
1859    return &_basic_context;
1860  }
1861
1862  // context for advanced style heap walk
1863  static AdvancedHeapWalkContext _advanced_context;
1864  static AdvancedHeapWalkContext* advanced_context() {
1865    assert(_advanced_context.is_valid(), "invalid");
1866    return &_advanced_context;
1867  }
1868
1869  // context needed for all heap walks
1870  static JvmtiTagMap* _tag_map;
1871  static const void* _user_data;
1872  static GrowableArray<oop>* _visit_stack;
1873
1874  // accessors
1875  static JvmtiTagMap* tag_map()                        { return _tag_map; }
1876  static const void* user_data()                       { return _user_data; }
1877  static GrowableArray<oop>* visit_stack()             { return _visit_stack; }
1878
1879  // if the object hasn't been visited then push it onto the visit stack
1880  // so that it will be visited later
1881  static inline bool check_for_visit(oop obj) {
1882    if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
1883    return true;
1884  }
1885
1886  // invoke basic style callbacks
1887  static inline bool invoke_basic_heap_root_callback
1888    (jvmtiHeapRootKind root_kind, oop obj);
1889  static inline bool invoke_basic_stack_ref_callback
1890    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
1891     int slot, oop obj);
1892  static inline bool invoke_basic_object_reference_callback
1893    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);
1894
1895  // invoke advanced style callbacks
1896  static inline bool invoke_advanced_heap_root_callback
1897    (jvmtiHeapReferenceKind ref_kind, oop obj);
1898  static inline bool invoke_advanced_stack_ref_callback
1899    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
1900     jmethodID method, jlocation bci, jint slot, oop obj);
1901  static inline bool invoke_advanced_object_reference_callback
1902    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);
1903
1904  // used to report the value of primitive fields
1905  static inline bool report_primitive_field
1906    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);
1907
1908 public:
1909  // initialize for basic mode
1910  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1911                                             GrowableArray<oop>* visit_stack,
1912                                             const void* user_data,
1913                                             BasicHeapWalkContext context);
1914
1915  // initialize for advanced mode
1916  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1917                                                GrowableArray<oop>* visit_stack,
1918                                                const void* user_data,
1919                                                AdvancedHeapWalkContext context);
1920
1921   // functions to report roots
1922  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
1923  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
1924    jmethodID m, oop o);
1925  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
1926    jmethodID method, jlocation bci, jint slot, oop o);
1927
1928  // functions to report references
1929  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
1930  static inline bool report_class_reference(oop referrer, oop referree);
1931  static inline bool report_class_loader_reference(oop referrer, oop referree);
1932  static inline bool report_signers_reference(oop referrer, oop referree);
1933  static inline bool report_protection_domain_reference(oop referrer, oop referree);
1934  static inline bool report_superclass_reference(oop referrer, oop referree);
1935  static inline bool report_interface_reference(oop referrer, oop referree);
1936  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
1937  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
1938  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
1939  static inline bool report_primitive_array_values(oop array);
1940  static inline bool report_string_value(oop str);
1941  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
1942  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
1943};
1944
1945// statics
1946int CallbackInvoker::_heap_walk_type;
1947BasicHeapWalkContext CallbackInvoker::_basic_context;
1948AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
1949JvmtiTagMap* CallbackInvoker::_tag_map;
1950const void* CallbackInvoker::_user_data;
1951GrowableArray<oop>* CallbackInvoker::_visit_stack;
1952
1953// initialize for basic heap walk (IterateOverReachableObjects et al)
1954void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1955                                                     GrowableArray<oop>* visit_stack,
1956                                                     const void* user_data,
1957                                                     BasicHeapWalkContext context) {
1958  _tag_map = tag_map;
1959  _visit_stack = visit_stack;
1960  _user_data = user_data;
1961  _basic_context = context;
1962  _advanced_context.invalidate();       // will trigger assertion if used
1963  _heap_walk_type = basic;
1964}
1965
1966// initialize for advanced heap walk (FollowReferences)
1967void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1968                                                        GrowableArray<oop>* visit_stack,
1969                                                        const void* user_data,
1970                                                        AdvancedHeapWalkContext context) {
1971  _tag_map = tag_map;
1972  _visit_stack = visit_stack;
1973  _user_data = user_data;
1974  _advanced_context = context;
1975  _basic_context.invalidate();      // will trigger assertion if used
1976  _heap_walk_type = advanced;
1977}
1978
1979
1980// invoke basic style heap root callback
1981inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
1982  assert(ServiceUtil::visible_oop(obj), "checking");
1983
1984  // check if heap roots should be reported
1985  jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
1986  if (cb == NULL) {
1987    return check_for_visit(obj);
1988  }
1989
1990  CallbackWrapper wrapper(tag_map(), obj);
1991  jvmtiIterationControl control = (*cb)(root_kind,
1992                                        wrapper.klass_tag(),
1993                                        wrapper.obj_size(),
1994                                        wrapper.obj_tag_p(),
1995                                        (void*)user_data());
1996  // push root to visit stack when following references
1997  if (control == JVMTI_ITERATION_CONTINUE &&
1998      basic_context()->object_ref_callback() != NULL) {
1999    visit_stack()->push(obj);
2000  }
2001  return control != JVMTI_ITERATION_ABORT;
2002}
2003
2004// invoke basic style stack ref callback
2005inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
2006                                                             jlong thread_tag,
2007                                                             jint depth,
2008                                                             jmethodID method,
2009                                                             jint slot,
2010                                                             oop obj) {
2011  assert(ServiceUtil::visible_oop(obj), "checking");
2012
2013  // check if stack refs should be reported
2014  jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
2015  if (cb == NULL) {
2016    return check_for_visit(obj);
2017  }
2018
2019  CallbackWrapper wrapper(tag_map(), obj);
2020  jvmtiIterationControl control = (*cb)(root_kind,
2021                                        wrapper.klass_tag(),
2022                                        wrapper.obj_size(),
2023                                        wrapper.obj_tag_p(),
2024                                        thread_tag,
2025                                        depth,
2026                                        method,
2027                                        slot,
2028                                        (void*)user_data());
2029  // push root to visit stack when following references
2030  if (control == JVMTI_ITERATION_CONTINUE &&
2031      basic_context()->object_ref_callback() != NULL) {
2032    visit_stack()->push(obj);
2033  }
2034  return control != JVMTI_ITERATION_ABORT;
2035}
2036
2037// invoke basic style object reference callback
2038inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
2039                                                                    oop referrer,
2040                                                                    oop referree,
2041                                                                    jint index) {
2042
2043  assert(ServiceUtil::visible_oop(referrer), "checking");
2044  assert(ServiceUtil::visible_oop(referree), "checking");
2045
2046  BasicHeapWalkContext* context = basic_context();
2047
2048  // the callback requires the referrer's tag. If it's the same referrer
2049  // as the last call then we use the cached value.
2050  jlong referrer_tag;
2051  if (referrer == context->last_referrer()) {
2052    referrer_tag = context->last_referrer_tag();
2053  } else {
2054    referrer_tag = tag_for(tag_map(), referrer);
2055  }
2056
2057  // do the callback
2058  CallbackWrapper wrapper(tag_map(), referree);
2059  jvmtiObjectReferenceCallback cb = context->object_ref_callback();
2060  jvmtiIterationControl control = (*cb)(ref_kind,
2061                                        wrapper.klass_tag(),
2062                                        wrapper.obj_size(),
2063                                        wrapper.obj_tag_p(),
2064                                        referrer_tag,
2065                                        index,
2066                                        (void*)user_data());
2067
2068  // record referrer and referrer tag. For self-references record the
2069  // tag value from the callback as this might differ from referrer_tag.
2070  context->set_last_referrer(referrer);
2071  if (referrer == referree) {
2072    context->set_last_referrer_tag(*wrapper.obj_tag_p());
2073  } else {
2074    context->set_last_referrer_tag(referrer_tag);
2075  }
2076
2077  if (control == JVMTI_ITERATION_CONTINUE) {
2078    return check_for_visit(referree);
2079  } else {
2080    return control != JVMTI_ITERATION_ABORT;
2081  }
2082}
2083
2084// invoke advanced style heap root callback
2085inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
2086                                                                oop obj) {
2087  assert(ServiceUtil::visible_oop(obj), "checking");
2088
2089  AdvancedHeapWalkContext* context = advanced_context();
2090
2091  // check that callback is provided
2092  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2093  if (cb == NULL) {
2094    return check_for_visit(obj);
2095  }
2096
2097  // apply class filter
2098  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2099    return check_for_visit(obj);
2100  }
2101
2102  // setup the callback wrapper
2103  CallbackWrapper wrapper(tag_map(), obj);
2104
2105  // apply tag filter
2106  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2107                                 wrapper.klass_tag(),
2108                                 context->heap_filter())) {
2109    return check_for_visit(obj);
2110  }
2111
2112  // for arrays we need the length, otherwise -1
2113  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2114
2115  // invoke the callback
2116  jint res  = (*cb)(ref_kind,
2117                    NULL, // referrer info
2118                    wrapper.klass_tag(),
2119                    0,    // referrer_class_tag is 0 for heap root
2120                    wrapper.obj_size(),
2121                    wrapper.obj_tag_p(),
2122                    NULL, // referrer_tag_p
2123                    len,
2124                    (void*)user_data());
2125  if (res & JVMTI_VISIT_ABORT) {
2126    return false;
2127  }
2128  if (res & JVMTI_VISIT_OBJECTS) {
2129    check_for_visit(obj);
2130  }
2131  return true;
2132}
2133
2134// report a reference from a thread stack to an object
2135inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
2136                                                                jlong thread_tag,
2137                                                                jlong tid,
2138                                                                int depth,
2139                                                                jmethodID method,
2140                                                                jlocation bci,
2141                                                                jint slot,
2142                                                                oop obj) {
2143  assert(ServiceUtil::visible_oop(obj), "checking");
2144
2145  AdvancedHeapWalkContext* context = advanced_context();
2146
2147  // check that the callback is provided
2148  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2149  if (cb == NULL) {
2150    return check_for_visit(obj);
2151  }
2152
2153  // apply class filter
2154  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2155    return check_for_visit(obj);
2156  }
2157
2158  // setup the callback wrapper
2159  CallbackWrapper wrapper(tag_map(), obj);
2160
2161  // apply tag filter
2162  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2163                                 wrapper.klass_tag(),
2164                                 context->heap_filter())) {
2165    return check_for_visit(obj);
2166  }
2167
2168  // setup the referrer info
2169  jvmtiHeapReferenceInfo reference_info;
2170  reference_info.stack_local.thread_tag = thread_tag;
2171  reference_info.stack_local.thread_id = tid;
2172  reference_info.stack_local.depth = depth;
2173  reference_info.stack_local.method = method;
2174  reference_info.stack_local.location = bci;
2175  reference_info.stack_local.slot = slot;
2176
2177  // for arrays we need the length, otherwise -1
2178  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2179
2180  // call into the agent
2181  int res = (*cb)(ref_kind,
2182                  &reference_info,
2183                  wrapper.klass_tag(),
2184                  0,    // referrer_class_tag is 0 for heap root (stack)
2185                  wrapper.obj_size(),
2186                  wrapper.obj_tag_p(),
2187                  NULL, // referrer_tag_p is NULL for roots
2188                  len,
2189                  (void*)user_data());
2190
2191  if (res & JVMTI_VISIT_ABORT) {
2192    return false;
2193  }
2194  if (res & JVMTI_VISIT_OBJECTS) {
2195    check_for_visit(obj);
2196  }
2197  return true;
2198}
2199
2200// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
2201// only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
2202#define REF_INFO_MASK  ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
2203                      | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
2204                      | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
2205                      | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
2206                      | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
2207                      | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
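//
// For example, JVMTI_HEAP_REFERENCE_FIELD has its bit set in REF_INFO_MASK,
// so &reference_info (carrying the field index) is passed to the callback,
// whereas for a kind such as JVMTI_HEAP_REFERENCE_CLASS the bit is clear and
// NULL is passed instead.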
2208
2209// invoke the object reference callback to report a reference
2210inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
2211                                                                       oop referrer,
2212                                                                       oop obj,
2213                                                                       jint index)
2214{
2215  // the field index is the only valid field in reference_info
2216  static jvmtiHeapReferenceInfo reference_info = { 0 };
2217
2218  assert(ServiceUtil::visible_oop(referrer), "checking");
2219  assert(ServiceUtil::visible_oop(obj), "checking");
2220
2221  AdvancedHeapWalkContext* context = advanced_context();
2222
2223  // check that the callback is provided
2224  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2225  if (cb == NULL) {
2226    return check_for_visit(obj);
2227  }
2228
2229  // apply class filter
2230  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2231    return check_for_visit(obj);
2232  }
2233
2234  // setup the callback wrapper
2235  TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
2236
2237  // apply tag filter
2238  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2239                                 wrapper.klass_tag(),
2240                                 context->heap_filter())) {
2241    return check_for_visit(obj);
2242  }
2243
2244  // the field index is the only valid field in reference_info
2245  reference_info.field.index = index;
2246
2247  // for arrays we need the length, otherwise -1
2248  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2249
2250  // invoke the callback
2251  int res = (*cb)(ref_kind,
2252                  (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
2253                  wrapper.klass_tag(),
2254                  wrapper.referrer_klass_tag(),
2255                  wrapper.obj_size(),
2256                  wrapper.obj_tag_p(),
2257                  wrapper.referrer_tag_p(),
2258                  len,
2259                  (void*)user_data());
2260
2261  if (res & JVMTI_VISIT_ABORT) {
2262    return false;
2263  }
2264  if (res & JVMTI_VISIT_OBJECTS) {
2265    check_for_visit(obj);
2266  }
2267  return true;
2268}
2269
2270// report a "simple root"
2271inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
2272  assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
2273         kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
2274  assert(ServiceUtil::visible_oop(obj), "checking");
2275
2276  if (is_basic_heap_walk()) {
2277    // map to old style root kind
2278    jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
2279    return invoke_basic_heap_root_callback(root_kind, obj);
2280  } else {
2281    assert(is_advanced_heap_walk(), "wrong heap walk type");
2282    return invoke_advanced_heap_root_callback(kind, obj);
2283  }
2284}
2285
2286
2287// invoke the array primitive value callback
2288inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
2289  assert(obj->is_typeArray(), "not a primitive array");
2290
2291  AdvancedHeapWalkContext* context = advanced_context();
2292  assert(context->array_primitive_value_callback() != NULL, "no callback");
2293
2294  // apply class filter
2295  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2296    return true;
2297  }
2298
2299  CallbackWrapper wrapper(tag_map(), obj);
2300
2301  // apply tag filter
2302  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2303                                 wrapper.klass_tag(),
2304                                 context->heap_filter())) {
2305    return true;
2306  }
2307
2308  // invoke the callback
2309  int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
2310                                                  &wrapper,
2311                                                  obj,
2312                                                  (void*)user_data());
2313  return (!(res & JVMTI_VISIT_ABORT));
2314}
2315
2316// invoke the string value callback
2317inline bool CallbackInvoker::report_string_value(oop str) {
2318  assert(str->klass() == SystemDictionary::String_klass(), "not a string");
2319
2320  AdvancedHeapWalkContext* context = advanced_context();
2321  assert(context->string_primitive_value_callback() != NULL, "no callback");
2322
2323  // apply class filter
2324  if (is_filtered_by_klass_filter(str, context->klass_filter())) {
2325    return true;
2326  }
2327
2328  CallbackWrapper wrapper(tag_map(), str);
2329
2330  // apply tag filter
2331  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2332                                 wrapper.klass_tag(),
2333                                 context->heap_filter())) {
2334    return true;
2335  }
2336
2337  // invoke the callback
2338  int res = invoke_string_value_callback(context->string_primitive_value_callback(),
2339                                         &wrapper,
2340                                         str,
2341                                         (void*)user_data());
2342  return (!(res & JVMTI_VISIT_ABORT));
2343}
2344
2345// invoke the primitive field callback
2346inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
2347                                                    oop obj,
2348                                                    jint index,
2349                                                    address addr,
2350                                                    char type)
2351{
2352  // for primitive fields only the index will be set
2353  static jvmtiHeapReferenceInfo reference_info = { 0 };
2354
2355  AdvancedHeapWalkContext* context = advanced_context();
2356  assert(context->primitive_field_callback() != NULL, "no callback");
2357
2358  // apply class filter
2359  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2360    return true;
2361  }
2362
2363  CallbackWrapper wrapper(tag_map(), obj);
2364
2365  // apply tag filter
2366  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2367                                 wrapper.klass_tag(),
2368                                 context->heap_filter())) {
2369    return true;
2370  }
2371
2372  // the field index in the referrer
2373  reference_info.field.index = index;
2374
2375  // map the type
2376  jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
2377
2378  // setup the jvalue
2379  jvalue value;
2380  copy_to_jvalue(&value, addr, value_type);
2381
2382  jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
2383  int res = (*cb)(ref_kind,
2384                  &reference_info,
2385                  wrapper.klass_tag(),
2386                  wrapper.obj_tag_p(),
2387                  value,
2388                  value_type,
2389                  (void*)user_data());
2390  return (!(res & JVMTI_VISIT_ABORT));
2391}
2392
2393
2394// instance field
2395inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
2396                                                             jint index,
2397                                                             address value,
2398                                                             char type) {
2399  return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2400                                obj,
2401                                index,
2402                                value,
2403                                type);
2404}
2405
2406// static field
2407inline bool CallbackInvoker::report_primitive_static_field(oop obj,
2408                                                           jint index,
2409                                                           address value,
2410                                                           char type) {
2411  return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2412                                obj,
2413                                index,
2414                                value,
2415                                type);
2416}
2417
2418// report a JNI local (root object) to the profiler
2419inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
2420  if (is_basic_heap_walk()) {
2421    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
2422                                           thread_tag,
2423                                           depth,
2424                                           m,
2425                                           -1,
2426                                           obj);
2427  } else {
2428    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
2429                                              thread_tag, tid,
2430                                              depth,
2431                                              m,
2432                                              (jlocation)-1,
2433                                              -1,
2434                                              obj);
2435  }
2436}
2437
2438
2439// report a local (stack reference, root object)
2440inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2441                                                   jlong tid,
2442                                                   jint depth,
2443                                                   jmethodID method,
2444                                                   jlocation bci,
2445                                                   jint slot,
2446                                                   oop obj) {
2447  if (is_basic_heap_walk()) {
2448    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2449                                           thread_tag,
2450                                           depth,
2451                                           method,
2452                                           slot,
2453                                           obj);
2454  } else {
2455    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2456                                              thread_tag,
2457                                              tid,
2458                                              depth,
2459                                              method,
2460                                              bci,
2461                                              slot,
2462                                              obj);
2463  }
2464}
2465
2466// report an object referencing a class.
2467inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
2468  if (is_basic_heap_walk()) {
2469    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2470  } else {
2471    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2472  }
2473}
2474
2475// report a class referencing its class loader.
2476inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
2477  if (is_basic_heap_walk()) {
2478    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2479  } else {
2480    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2481  }
2482}
2483
2484// report a class referencing its signers.
2485inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2486  if (is_basic_heap_walk()) {
2487    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2488  } else {
2489    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2490  }
2491}
2492
2493// report a class referencing its protection domain.
2494inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2495  if (is_basic_heap_walk()) {
2496    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2497  } else {
2498    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2499  }
2500}
2501
2502// report a class referencing its superclass.
2503inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2504  if (is_basic_heap_walk()) {
2505    // Send this to be consistent with past implementation
2506    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2507  } else {
2508    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2509  }
2510}
2511
2512// report a class referencing one of its interfaces.
2513inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2514  if (is_basic_heap_walk()) {
2515    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2516  } else {
2517    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2518  }
2519}
2520
2521// report a class referencing one of its static fields.
2522inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2523  if (is_basic_heap_walk()) {
2524    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2525  } else {
2526    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2527  }
2528}
2529
2530// report an array referencing an element object
2531inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2532  if (is_basic_heap_walk()) {
2533    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2534  } else {
2535    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2536  }
2537}
2538
2539// report an object referencing an instance field object
2540inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2541  if (is_basic_heap_walk()) {
2542    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2543  } else {
2544    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2545  }
2546}
2547
2548// report a class referencing one of its constant pool entries
2549inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2550  if (is_basic_heap_walk()) {
2551    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2552  } else {
2553    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2554  }
2555}
2556
2557// A supporting closure used to process simple roots
2558class SimpleRootsClosure : public OopClosure {
2559 private:
2560  jvmtiHeapReferenceKind _kind;
2561  bool _continue;
2562
2563  jvmtiHeapReferenceKind root_kind()    { return _kind; }
2564
2565 public:
2566  void set_kind(jvmtiHeapReferenceKind kind) {
2567    _kind = kind;
2568    _continue = true;
2569  }
2570
2571  inline bool stopped() {
2572    return !_continue;
2573  }
2574
2575  void do_oop(oop* obj_p) {
2576    // iteration has terminated
2577    if (stopped()) {
2578      return;
2579    }
2580
2581    // ignore null or deleted handles
2582    oop o = *obj_p;
2583    if (o == NULL || o == JNIHandles::deleted_handle()) {
2584      return;
2585    }
2586
2587    assert(Universe::heap()->is_in_reserved(o), "should be impossible");
2588
2589    jvmtiHeapReferenceKind kind = root_kind();
2590    if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
2591      // SystemDictionary::always_strong_oops_do reports the application
2592      // class loader as a root. We want this root to be reported as
2593      // a root kind of "OTHER" rather than "SYSTEM_CLASS".
2594      if (!o->is_instance() || !InstanceKlass::cast(o->klass())->is_mirror_instance_klass()) {
2595        kind = JVMTI_HEAP_REFERENCE_OTHER;
2596      }
2597    }
2598
2599    // some objects are ignored - in the case of simple
2600    // roots it's mostly Symbol*s that we are skipping
2601    // here.
2602    if (!ServiceUtil::visible_oop(o)) {
2603      return;
2604    }
2605
2606    // invoke the callback
2607    _continue = CallbackInvoker::report_simple_root(kind, o);
2608
2609  }
2610  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2611};
2612
2613// A supporting closure used to process JNI locals
2614class JNILocalRootsClosure : public OopClosure {
2615 private:
2616  jlong _thread_tag;
2617  jlong _tid;
2618  jint _depth;
2619  jmethodID _method;
2620  bool _continue;
2621 public:
2622  void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
2623    _thread_tag = thread_tag;
2624    _tid = tid;
2625    _depth = depth;
2626    _method = method;
2627    _continue = true;
2628  }
2629
2630  inline bool stopped() {
2631    return !_continue;
2632  }
2633
2634  void do_oop(oop* obj_p) {
2635    // iteration has terminated
2636    if (stopped()) {
2637      return;
2638    }
2639
2640    // ignore null or deleted handles
2641    oop o = *obj_p;
2642    if (o == NULL || o == JNIHandles::deleted_handle()) {
2643      return;
2644    }
2645
2646    if (!ServiceUtil::visible_oop(o)) {
2647      return;
2648    }
2649
2650    // invoke the callback
2651    _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
2652  }
2653  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2654};
2655
2656
2657// A VM operation to iterate over objects that are reachable from
2658// a set of roots or an initial object.
2659//
2660// For VM_HeapWalkOperation the set of roots used is :-
2661//
2662// - All JNI global references
2663// - All inflated monitors
2664// - All classes loaded by the boot class loader (or all classes
2665//     in the event that class unloading is disabled)
2666// - All java threads
2667// - For each java thread then all locals and JNI local references
2668//      on the thread's execution stack
2669// - All visible/explainable objects from Universe::oops_do
2670//
2671class VM_HeapWalkOperation: public VM_Operation {
2672 private:
2673  enum {
2674    initial_visit_stack_size = 4000
2675  };
2676
2677  bool _is_advanced_heap_walk;                      // indicates FollowReferences
2678  JvmtiTagMap* _tag_map;
2679  Handle _initial_object;
2680  GrowableArray<oop>* _visit_stack;                 // the visit stack
2681
2682  bool _collecting_heap_roots;                      // are we collecting roots
2683  bool _following_object_refs;                      // are we following object references
2684
2685  bool _reporting_primitive_fields;                 // optional reporting
2686  bool _reporting_primitive_array_values;
2687  bool _reporting_string_values;
2688
2689  GrowableArray<oop>* create_visit_stack() {
2690    return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true);
2691  }
2692
2693  // accessors
2694  bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
2695  JvmtiTagMap* tag_map() const                     { return _tag_map; }
2696  Handle initial_object() const                    { return _initial_object; }
2697
2698  bool is_following_references() const             { return _following_object_refs; }
2699
2700  bool is_reporting_primitive_fields()  const      { return _reporting_primitive_fields; }
2701  bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
2702  bool is_reporting_string_values() const          { return _reporting_string_values; }
2703
2704  GrowableArray<oop>* visit_stack() const          { return _visit_stack; }
2705
2706  // iterate over the various object types
2707  inline bool iterate_over_array(oop o);
2708  inline bool iterate_over_type_array(oop o);
2709  inline bool iterate_over_class(oop o);
2710  inline bool iterate_over_object(oop o);
2711
2712  // root collection
2713  inline bool collect_simple_roots();
2714  inline bool collect_stack_roots();
2715  inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);
2716
2717  // visit an object
2718  inline bool visit(oop o);
2719
2720 public:
2721  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2722                       Handle initial_object,
2723                       BasicHeapWalkContext callbacks,
2724                       const void* user_data);
2725
2726  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2727                       Handle initial_object,
2728                       AdvancedHeapWalkContext callbacks,
2729                       const void* user_data);
2730
2731  ~VM_HeapWalkOperation();
2732
2733  VMOp_Type type() const { return VMOp_HeapWalkOperation; }
2734  void doit();
2735};
2736
2737
2738VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2739                                           Handle initial_object,
2740                                           BasicHeapWalkContext callbacks,
2741                                           const void* user_data) {
2742  _is_advanced_heap_walk = false;
2743  _tag_map = tag_map;
2744  _initial_object = initial_object;
2745  _following_object_refs = (callbacks.object_ref_callback() != NULL);
2746  _reporting_primitive_fields = false;
2747  _reporting_primitive_array_values = false;
2748  _reporting_string_values = false;
2749  _visit_stack = create_visit_stack();
2750
2751
2752  CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2753}
2754
2755VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2756                                           Handle initial_object,
2757                                           AdvancedHeapWalkContext callbacks,
2758                                           const void* user_data) {
2759  _is_advanced_heap_walk = true;
2760  _tag_map = tag_map;
2761  _initial_object = initial_object;
2762  _following_object_refs = true;
2763  _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);
2764  _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);
2765  _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);
2766  _visit_stack = create_visit_stack();
2767
2768  CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2769}
2770
2771VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2772  if (_following_object_refs) {
2773    assert(_visit_stack != NULL, "checking");
2774    delete _visit_stack;
2775    _visit_stack = NULL;
2776  }
2777}
2778
2779// an array references its class and has a reference to
2780// each element in the array
2781inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
2782  objArrayOop array = objArrayOop(o);
2783
2784  // array reference to its class
2785  oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
2786  if (!CallbackInvoker::report_class_reference(o, mirror)) {
2787    return false;
2788  }
2789
2790  // iterate over the array and report each reference to a
2791  // non-null element
2792  for (int index=0; index<array->length(); index++) {
2793    oop elem = array->obj_at(index);
2794    if (elem == NULL) {
2795      continue;
2796    }
2797
2798    // report the array reference o[index] = elem
2799    if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2800      return false;
2801    }
2802  }
2803  return true;
2804}
2805
2806// a type array references its class
2807inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
2808  Klass* k = o->klass();
2809  oop mirror = k->java_mirror();
2810  if (!CallbackInvoker::report_class_reference(o, mirror)) {
2811    return false;
2812  }
2813
2814  // report the array contents if required
2815  if (is_reporting_primitive_array_values()) {
2816    if (!CallbackInvoker::report_primitive_array_values(o)) {
2817      return false;
2818    }
2819  }
2820  return true;
2821}
2822
2823#ifdef ASSERT
2824// verify that a static oop field is in range
2825static inline bool verify_static_oop(InstanceKlass* ik,
2826                                     oop mirror, int offset) {
2827  address obj_p = (address)mirror + offset;
2828  address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
2829  address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
2830  assert(end >= start, "sanity check");
2831
2832  if (obj_p >= start && obj_p < end) {
2833    return true;
2834  } else {
2835    return false;
2836  }
2837}
2838#endif // #ifdef ASSERT
2839
2840// a class references its super class, interfaces, class loader, ...
2841// and finally its static fields
2842inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
2843  int i;
2844  Klass* klass = java_lang_Class::as_Klass(java_class);
2845
2846  if (klass->is_instance_klass()) {
2847    InstanceKlass* ik = InstanceKlass::cast(klass);
2848
2849    // Ignore the class if it hasn't been linked yet
2850    if (!ik->is_linked()) {
2851      return true;
2852    }
2853
2854    // get the java mirror
2855    oop mirror = klass->java_mirror();
2856
2857    // super (only if something more interesting than java.lang.Object)
2858    Klass* java_super = ik->java_super();
2859    if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
2860      oop super = java_super->java_mirror();
2861      if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
2862        return false;
2863      }
2864    }
2865
2866    // class loader
2867    oop cl = ik->class_loader();
2868    if (cl != NULL) {
2869      if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
2870        return false;
2871      }
2872    }
2873
2874    // protection domain
2875    oop pd = ik->protection_domain();
2876    if (pd != NULL) {
2877      if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
2878        return false;
2879      }
2880    }
2881
2882    // signers
2883    oop signers = ik->signers();
2884    if (signers != NULL) {
2885      if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
2886        return false;
2887      }
2888    }
2889
2890    // references from the constant pool
2891    {
2892      ConstantPool* pool = ik->constants();
2893      for (int i = 1; i < pool->length(); i++) {
2894        constantTag tag = pool->tag_at(i).value();
2895        if (tag.is_string() || tag.is_klass()) {
2896          oop entry;
2897          if (tag.is_string()) {
2898            entry = pool->resolved_string_at(i);
2899            // If the entry is non-null it is resolved.
2900            if (entry == NULL) continue;
2901          } else {
2902            entry = pool->resolved_klass_at(i)->java_mirror();
2903          }
2904          if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
2905            return false;
2906          }
2907        }
2908      }
2909    }
2910
2911    // interfaces
2912    // (These will already have been reported as references from the constant pool
2913    //  but are specified by IterateOverReachableObjects and must be reported).
2914    Array<Klass*>* interfaces = ik->local_interfaces();
2915    for (i = 0; i < interfaces->length(); i++) {
2916      oop interf = ((Klass*)interfaces->at(i))->java_mirror();
2917      if (interf == NULL) {
2918        continue;
2919      }
2920      if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
2921        return false;
2922      }
2923    }
2924
2925    // iterate over the static fields
2926
2927    ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
2928    for (i=0; i<field_map->field_count(); i++) {
2929      ClassFieldDescriptor* field = field_map->field_at(i);
2930      char type = field->field_type();
2931      if (!is_primitive_field_type(type)) {
2932        oop fld_o = mirror->obj_field(field->field_offset());
2933        assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
2934        if (fld_o != NULL) {
2935          int slot = field->field_index();
2936          if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
2937            delete field_map;
2938            return false;
2939          }
2940        }
2941      } else {
2942         if (is_reporting_primitive_fields()) {
2943           address addr = (address)mirror + field->field_offset();
2944           int slot = field->field_index();
2945           if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
2946             delete field_map;
2947             return false;
2948          }
2949        }
2950      }
2951    }
2952    delete field_map;
2953
2954    return true;
2955  }
2956
2957  return true;
2958}
2959
2960// an object references a class and its instance fields
2961// (static fields are ignored here as we report these as
2962// references from the class).
2963inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
2964  // reference to the class
2965  if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
2966    return false;
2967  }
2968
2969  // iterate over instance fields
2970  ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
2971  for (int i=0; i<field_map->field_count(); i++) {
2972    ClassFieldDescriptor* field = field_map->field_at(i);
2973    char type = field->field_type();
2974    if (!is_primitive_field_type(type)) {
2975      oop fld_o = o->obj_field(field->field_offset());
2976      // ignore any objects that aren't visible to profiler
2977      if (fld_o != NULL && ServiceUtil::visible_oop(fld_o)) {
2978        assert(Universe::heap()->is_in_reserved(fld_o), "unsafe code should not "
2979               "have references to Klass* anymore");
2980        int slot = field->field_index();
2981        if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
2982          return false;
2983        }
2984      }
2985    } else {
2986      if (is_reporting_primitive_fields()) {
2987        // primitive instance field
2988        address addr = (address)o + field->field_offset();
2989        int slot = field->field_index();
2990        if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
2991          return false;
2992        }
2993      }
2994    }
2995  }
2996
2997  // if the object is a java.lang.String
2998  if (is_reporting_string_values() &&
2999      o->klass() == SystemDictionary::String_klass()) {
3000    if (!CallbackInvoker::report_string_value(o)) {
3001      return false;
3002    }
3003  }
3004  return true;
3005}
3006
3007
// Collects all simple (non-stack) roots except for threads;
// threads are handled in collect_stack_roots() as an optimization.
// If a heap root callback is provided then it is invoked for each
// simple root. If an object reference callback is provided then all
// simple roots are pushed onto the marking stack so that they can be
// processed later.
//
3016inline bool VM_HeapWalkOperation::collect_simple_roots() {
3017  SimpleRootsClosure blk;
3018
3019  // JNI globals
3020  blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
3021  JNIHandles::oops_do(&blk);
3022  if (blk.stopped()) {
3023    return false;
3024  }
3025
3026  // Preloaded classes and loader from the system dictionary
3027  blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
3028  SystemDictionary::always_strong_oops_do(&blk);
3029  KlassToOopClosure klass_blk(&blk);
3030  ClassLoaderDataGraph::always_strong_oops_do(&blk, &klass_blk, false);
3031  if (blk.stopped()) {
3032    return false;
3033  }
3034
3035  // Inflated monitors
3036  blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
3037  ObjectSynchronizer::oops_do(&blk);
3038  if (blk.stopped()) {
3039    return false;
3040  }
3041
3042  // threads are now handled in collect_stack_roots()
3043
3044  // Other kinds of roots maintained by HotSpot
3045  // Many of these won't be visible but others (such as instances of important
3046  // exceptions) will be visible.
3047  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3048  Universe::oops_do(&blk);
3049
3050  // If there are any non-perm roots in the code cache, visit them.
3051  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3052  CodeBlobToOopClosure look_in_blobs(&blk, !CodeBlobToOopClosure::FixRelocations);
3053  CodeCache::scavenge_root_nmethods_do(&look_in_blobs);
3054
3055  return true;
3056}
3057
// Walk the stack of a given thread and find all references (locals,
// expression stack entries and JNI locals) and report these as stack references
3060inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
3061                                                      JNILocalRootsClosure* blk)
3062{
3063  oop threadObj = java_thread->threadObj();
3064  assert(threadObj != NULL, "sanity check");
3065
3066  // only need to get the thread's tag once per thread
3067  jlong thread_tag = tag_for(_tag_map, threadObj);
3068
3069  // also need the thread id
3070  jlong tid = java_lang_Thread::thread_id(threadObj);
3071
3072
3073  if (java_thread->has_last_Java_frame()) {
3074
3075    // vframes are resource allocated
3076    Thread* current_thread = Thread::current();
3077    ResourceMark rm(current_thread);
3078    HandleMark hm(current_thread);
3079
3080    RegisterMap reg_map(java_thread);
3081    frame f = java_thread->last_frame();
3082    vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);
3083
3084    bool is_top_frame = true;
3085    int depth = 0;
3086    frame* last_entry_frame = NULL;
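    // An entry frame marks the point where native code called into Java;
    // the JNI locals created by the calling native method live in the entry
    // frame's call wrapper, so the frame is remembered here and its handles
    // are reported when the corresponding native javaVFrame is reached below.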
3087
3088    while (vf != NULL) {
3089      if (vf->is_java_frame()) {
3090
3091        // java frame (interpreted, compiled, ...)
3092        javaVFrame *jvf = javaVFrame::cast(vf);
3093
3094        // the jmethodID
3095        jmethodID method = jvf->method()->jmethod_id();
3096
3097        if (!(jvf->method()->is_native())) {
3098          jlocation bci = (jlocation)jvf->bci();
3099          StackValueCollection* locals = jvf->locals();
3100          for (int slot=0; slot<locals->size(); slot++) {
3101            if (locals->at(slot)->type() == T_OBJECT) {
3102              oop o = locals->obj_at(slot)();
3103              if (o == NULL) {
3104                continue;
3105              }
3106
3107              // stack reference
3108              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
3109                                                   bci, slot, o)) {
3110                return false;
3111              }
3112            }
3113          }
3114
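          // expression stack entries are reported with slot numbers that
          // follow the local variable slots (locals->size() + index)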
3115          StackValueCollection* exprs = jvf->expressions();
3116          for (int index=0; index < exprs->size(); index++) {
3117            if (exprs->at(index)->type() == T_OBJECT) {
3118              oop o = exprs->obj_at(index)();
3119              if (o == NULL) {
3120                continue;
3121              }
3122
3123              // stack reference
3124              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
3125                                                   bci, locals->size() + index, o)) {
3126                return false;
3127              }
3128            }
3129          }
3130
3131          // Follow oops from compiled nmethod
3132          if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) {
3133            blk->set_context(thread_tag, tid, depth, method);
3134            jvf->cb()->as_nmethod()->oops_do(blk);
3135          }
3136        } else {
3137          blk->set_context(thread_tag, tid, depth, method);
3138          if (is_top_frame) {
3139            // JNI locals for the top frame.
3140            java_thread->active_handles()->oops_do(blk);
3141          } else {
3142            if (last_entry_frame != NULL) {
3143              // JNI locals for the entry frame
3144              assert(last_entry_frame->is_entry_frame(), "checking");
3145              last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
3146            }
3147          }
3148        }
3149        last_entry_frame = NULL;
3150        depth++;
3151      } else {
        // externalVFrame - if this is an entry frame then defer reporting its
        // JNI locals until the corresponding javaVFrame is found
3154        frame* fr = vf->frame_pointer();
3155        assert(fr != NULL, "sanity check");
3156        if (fr->is_entry_frame()) {
3157          last_entry_frame = fr;
3158        }
3159      }
3160
3161      vf = vf->sender();
3162      is_top_frame = false;
3163    }
3164  } else {
3165    // no last java frame but there may be JNI locals
3166    blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
3167    java_thread->active_handles()->oops_do(blk);
3168  }
3169  return true;
3170}
3171
3172
// Collects the simple root for each thread (the thread object itself)
// and then collects that thread's stack roots - walking its execution
// stack to find all references and local JNI refs.
3176inline bool VM_HeapWalkOperation::collect_stack_roots() {
3177  JNILocalRootsClosure blk;
3178  for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
3179    oop threadObj = thread->threadObj();
3180    if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
3181      // Collect the simple root for this thread before we
3182      // collect its stack roots
3183      if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
3184                                               threadObj)) {
3185        return false;
3186      }
3187      if (!collect_stack_roots(thread, &blk)) {
3188        return false;
3189      }
3190    }
3191  }
3192  return true;
3193}
3194
// Visit an object: first mark the object as visited, then report all the
// outbound references from this object (in other words, all the objects
// referenced by this object).
//
3200bool VM_HeapWalkOperation::visit(oop o) {
3201  // mark object as visited
3202  assert(!ObjectMarker::visited(o), "can't visit same object more than once");
3203  ObjectMarker::mark(o);
3204
3205  // instance
3206  if (o->is_instance()) {
3207    if (o->klass() == SystemDictionary::Class_klass()) {
3208      if (!java_lang_Class::is_primitive(o)) {
3209        // a java.lang.Class
3210        return iterate_over_class(o);
3211      }
3212    } else {
3213      return iterate_over_object(o);
3214    }
3215  }
3216
3217  // object array
3218  if (o->is_objArray()) {
3219    return iterate_over_array(o);
3220  }
3221
3222  // type array
3223  if (o->is_typeArray()) {
3224    return iterate_over_type_array(o);
3225  }
3226
3227  return true;
3228}
3229
3230void VM_HeapWalkOperation::doit() {
3231  ResourceMark rm;
3232  ObjectMarkerController marker;
3233  ClassFieldMapCacheMark cm;
3234
3235  assert(visit_stack()->is_empty(), "visit stack must be empty");
3236
3237  // the heap walk starts with an initial object or the heap roots
3238  if (initial_object().is_null()) {
3239    // If either collect_stack_roots() or collect_simple_roots()
3240    // returns false at this point, then there are no mark bits
3241    // to reset.
3242    ObjectMarker::set_needs_reset(false);
3243
3244    // Calling collect_stack_roots() before collect_simple_roots()
3245    // can result in a big performance boost for an agent that is
3246    // focused on analyzing references in the thread stacks.
3247    if (!collect_stack_roots()) return;
3248
3249    if (!collect_simple_roots()) return;
3250
3251    // no early return so enable heap traversal to reset the mark bits
3252    ObjectMarker::set_needs_reset(true);
3253  } else {
3254    visit_stack()->push(initial_object()());
3255  }
3256
3257  // object references required
3258  if (is_following_references()) {
3259
3260    // visit each object until all reachable objects have been
3261    // visited or the callback asked to terminate the iteration.
3262    while (!visit_stack()->is_empty()) {
3263      oop o = visit_stack()->pop();
3264      if (!ObjectMarker::visited(o)) {
3265        if (!visit(o)) {
3266          break;
3267        }
3268      }
3269    }
3270  }
3271}
3272
3273// iterate over all objects that are reachable from a set of roots
3274void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
3275                                                 jvmtiStackReferenceCallback stack_ref_callback,
3276                                                 jvmtiObjectReferenceCallback object_ref_callback,
3277                                                 const void* user_data) {
3278  MutexLocker ml(Heap_lock);
3279  BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
3280  VM_HeapWalkOperation op(this, Handle(), context, user_data);
3281  VMThread::execute(&op);
3282}
3283
3284// iterate over all objects that are reachable from a given object
3285void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
3286                                                             jvmtiObjectReferenceCallback object_ref_callback,
3287                                                             const void* user_data) {
3288  oop obj = JNIHandles::resolve(object);
3289  Handle initial_object(Thread::current(), obj);
3290
3291  MutexLocker ml(Heap_lock);
3292  BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
3293  VM_HeapWalkOperation op(this, initial_object, context, user_data);
3294  VMThread::execute(&op);
3295}
3296
3297// follow references from an initial object or the GC roots
3298void JvmtiTagMap::follow_references(jint heap_filter,
3299                                    KlassHandle klass,
3300                                    jobject object,
3301                                    const jvmtiHeapCallbacks* callbacks,
3302                                    const void* user_data)
3303{
3304  oop obj = JNIHandles::resolve(object);
3305  Handle initial_object(Thread::current(), obj);
3306
3307  MutexLocker ml(Heap_lock);
3308  AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
3309  VM_HeapWalkOperation op(this, initial_object, context, user_data);
3310  VMThread::execute(&op);
3311}
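
// Illustrative sketch (not part of the VM): follow_references() above backs
// the JVMTI FollowReferences API. An agent would typically drive it roughly
// as below; the callback name and tag values are hypothetical.
//
//   static jint JNICALL heap_ref_cb(jvmtiHeapReferenceKind kind,
//                                   const jvmtiHeapReferenceInfo* info,
//                                   jlong class_tag, jlong referrer_class_tag,
//                                   jlong size, jlong* tag_ptr,
//                                   jlong* referrer_tag_ptr, jint length,
//                                   void* user_data) {
//     if (*tag_ptr == 0) {
//       *tag_ptr = 1;                  // tag each object the first time it is visited
//     }
//     return JVMTI_VISIT_OBJECTS;      // keep following outbound references
//   }
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = heap_ref_cb;
//   // heap_filter 0, NULL klass and NULL initial_object: no filtering,
//   // start the walk from the heap roots
//   jvmti->FollowReferences(0, NULL, NULL, &callbacks, NULL);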
3312
3313
3314void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  // No locks during VM bring-up (0 threads) and no safepoints after main
  // thread creation and before VMThread creation (1 thread); initial GC
  // verification can happen in that window, which is how we can get here.
3318  assert(Threads::number_of_threads() <= 1 ||
3319         SafepointSynchronize::is_at_safepoint(),
3320         "must be executed at a safepoint");
3321  if (JvmtiEnv::environments_might_exist()) {
3322    JvmtiEnvIterator it;
3323    for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
3324      JvmtiTagMap* tag_map = env->tag_map();
3325      if (tag_map != NULL && !tag_map->is_empty()) {
3326        tag_map->do_weak_oops(is_alive, f);
3327      }
3328    }
3329  }
3330}
3331
3332void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) {
3333
  // does this environment have the OBJECT_FREE event enabled?
3335  bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
3336
3337  // counters used for trace message
3338  int freed = 0;
3339  int moved = 0;
3340
3341  JvmtiTagHashmap* hashmap = this->hashmap();
3342
3343  // reenable sizing (if disabled)
3344  hashmap->set_resizing_enabled(true);
3345
3346  // if the hashmap is empty then we can skip it
3347  if (hashmap->_entry_count == 0) {
3348    return;
3349  }
3350
3351  // now iterate through each entry in the table
3352
3353  JvmtiTagHashmapEntry** table = hashmap->table();
3354  int size = hashmap->size();
3355
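  // Entries whose new bucket is ahead of the current scan position are kept
  // on this list and re-added after the pass so they are not visited twice.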
3356  JvmtiTagHashmapEntry* delayed_add = NULL;
3357
3358  for (int pos = 0; pos < size; ++pos) {
3359    JvmtiTagHashmapEntry* entry = table[pos];
3360    JvmtiTagHashmapEntry* prev = NULL;
3361
3362    while (entry != NULL) {
3363      JvmtiTagHashmapEntry* next = entry->next();
3364
      // has the object been GC'ed?
3368      if (!is_alive->do_object_b(entry->object())) {
3369        // grab the tag
3370        jlong tag = entry->tag();
3371        guarantee(tag != 0, "checking");
3372
3373        // remove GC'ed entry from hashmap and return the
3374        // entry to the free list
3375        hashmap->remove(prev, pos, entry);
3376        destroy_entry(entry);
3377
3378        // post the event to the profiler
3379        if (post_object_free) {
3380          JvmtiExport::post_object_free(env(), tag);
3381        }
3382
3383        ++freed;
3384      } else {
3385        f->do_oop(entry->object_addr());
3386        oop new_oop = entry->object();
3387
3388        // if the object has moved then re-hash it and move its
3389        // entry to its new location.
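        // (the hash is computed from the object's address, so a copying GC
        //  that moves the object can change its bucket)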
3390        unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size);
3391        if (new_pos != (unsigned int)pos) {
3392          if (prev == NULL) {
3393            table[pos] = next;
3394          } else {
3395            prev->set_next(next);
3396          }
3397          if (new_pos < (unsigned int)pos) {
3398            entry->set_next(table[new_pos]);
3399            table[new_pos] = entry;
3400          } else {
            // Delay adding this entry to its new position as we'd end up
            // hitting it again during this iteration.
3403            entry->set_next(delayed_add);
3404            delayed_add = entry;
3405          }
3406          moved++;
3407        } else {
3408          // object didn't move
3409          prev = entry;
3410        }
3411      }
3412
3413      entry = next;
3414    }
3415  }
3416
3417  // Re-add all the entries which were kept aside
3418  while (delayed_add != NULL) {
3419    JvmtiTagHashmapEntry* next = delayed_add->next();
3420    unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object(), size);
3421    delayed_add->set_next(table[pos]);
3422    table[pos] = delayed_add;
3423    delayed_add = next;
3424  }
3425
3426  log_debug(jvmti, objecttagging)("(%d->%d, %d freed, %d total moves)",
3427                                  hashmap->_entry_count + freed, hashmap->_entry_count, freed, moved);
3428}
3429