instanceKlass.cpp revision 605:98cb887364d3
/*
 * Copyright 1997-2008 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

# include "incls/_precompiled.incl"
# include "incls/_instanceKlass.cpp.incl"

bool instanceKlass::should_be_initialized() const {
  return !is_initialized();
}

klassVtable* instanceKlass::vtable() const {
  return new klassVtable(as_klassOop(), start_of_vtable(), vtable_length() / vtableEntry::size());
}

klassItable* instanceKlass::itable() const {
  return new klassItable(as_klassOop());
}

void instanceKlass::eager_initialize(Thread *thread) {
  if (!EagerInitialization) return;

  if (this->is_not_initialized()) {
    // abort if the class has a class initializer
    if (this->class_initializer() != NULL) return;

    // abort if it is java.lang.Object (initialization is handled in genesis)
    klassOop super = this->super();
    if (super == NULL) return;

    // abort if the super class should be initialized
    if (!instanceKlass::cast(super)->is_initialized()) return;

    // call body to expose the this pointer
    instanceKlassHandle this_oop(thread, this->as_klassOop());
    eager_initialize_impl(this_oop);
  }
}


void instanceKlass::eager_initialize_impl(instanceKlassHandle this_oop) {
  EXCEPTION_MARK;
  ObjectLocker ol(this_oop, THREAD);

  // abort if someone beat us to the initialization
  if (!this_oop->is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = this_oop->_init_state;
  link_class_impl(this_oop, true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change.  set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if( old_state != this_oop->_init_state )
      this_oop->set_init_state (old_state);
  } else {
    // linking successful, mark class as initialized
    this_oop->set_init_state (fully_initialized);
    // trace
    if (TraceClassInitialization) {
      ResourceMark rm(THREAD);
      tty->print_cr("[Initialized %s without side effects]", this_oop->external_name());
    }
  }
}


// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process. The step comments refer to the procedure described in that section.
// Note: implementation moved to static method to expose the this pointer.
void instanceKlass::initialize(TRAPS) {
  if (this->should_be_initialized()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_oop(THREAD, this->as_klassOop());
    initialize_impl(this_oop, CHECK);
    // Note: at this point the class may be initialized
    //       OR it may be in the state of being initialized
    //       in case of recursive initialization!
  } else {
    assert(is_initialized(), "sanity check");
  }
}
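
// A minimal usage sketch (illustrative only, not part of the original file):
// runtime code that has resolved a klassOop typically drives the protocol
// above like the commented-out helper below. The helper name and the
// variable `k` are hypothetical; the TRAPS/CHECK idiom is the one used
// throughout this file.
//
//   static void ensure_initialized(klassOop k, TRAPS) {
//     instanceKlass* ik = instanceKlass::cast(k);
//     if (ik->should_be_initialized()) {
//       ik->initialize(CHECK);  // may block on another initializer or rethrow
//     }
//   }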


bool instanceKlass::verify_code(
    instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
  // 1) Verify the bytecodes
  Verifier::Mode mode =
    throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
  return Verifier::verify(this_oop, mode, CHECK_false);
}


// Used exclusively by the shared spaces dump mechanism to prevent
// classes mapped into the shared regions in new VMs from appearing linked.

void instanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  _init_state = loaded;
}

void instanceKlass::link_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    instanceKlassHandle this_oop(THREAD, this->as_klassOop());
    link_class_impl(this_oop, true, CHECK);
  }
}

// Called to verify that a class can link during initialization, without
// throwing a VerifyError.
bool instanceKlass::link_class_or_fail(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    instanceKlassHandle this_oop(THREAD, this->as_klassOop());
    link_class_impl(this_oop, false, CHECK_false);
  }
  return is_linked();
}
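
// Sketch of the difference between the two entry points above (hypothetical
// caller, for illustration only):
//
//   ik->link_class(CHECK);                          // VerifyError propagates
//   bool ok = ik->link_class_or_fail(CHECK_false);  // VerifyError => false
//
// Only the verification failure itself is suppressed by link_class_or_fail();
// other exceptions raised while linking (for example the
// IncompatibleClassChangeError below for an interface super class) still
// propagate through CHECK_false.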

bool instanceKlass::link_class_impl(
    instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
  // check for error state
  if (this_oop->is_in_error_state()) {
    ResourceMark rm(THREAD);
    THROW_MSG_(vmSymbols::java_lang_NoClassDefFoundError(),
               this_oop->external_name(), false);
  }
  // return if already verified
  if (this_oop->is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
  JavaThread* jt = (JavaThread*)THREAD;
  PerfTraceTimedEvent vmtimer(ClassLoader::perf_class_link_time(),
                        ClassLoader::perf_classes_linked(),
                        jt->get_thread_stat()->class_link_recursion_count_addr());

  // link super class before linking this class
  instanceKlassHandle super(THREAD, this_oop->super());
  if (super.not_null()) {
    if (super->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbolHandles::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        this_oop->external_name(),
        super->external_name()
      );
      return false;
    }

    link_class_impl(super, throw_verifyerror, CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  objArrayHandle interfaces (THREAD, this_oop->local_interfaces());
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    HandleMark hm(THREAD);
    instanceKlassHandle ih(THREAD, klassOop(interfaces->obj_at(index)));
    link_class_impl(ih, throw_verifyerror, CHECK_false);
  }

  // in case the class is linked in the process of linking its superclasses
  if (this_oop->is_linked()) {
    return true;
  }

  // verification & rewriting
  {
    ObjectLocker ol(this_oop, THREAD);
    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten
    if (!this_oop->is_linked()) {
      if (!this_oop->is_rewritten()) {
        {
          assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
          JavaThread* jt = (JavaThread*)THREAD;
          // Timer includes any side effects of class verification (resolution,
          // etc), but not recursive entry into verify_code().
          PerfTraceTime timer(ClassLoader::perf_class_verify_time(),
                            jt->get_thread_stat()->class_verify_recursion_count_addr());
          bool verify_ok = verify_code(this_oop, throw_verifyerror, THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (this_oop->is_linked()) {
          return true;
        }

        // also sets rewritten
        this_oop->rewrite_class(CHECK_false);
      }

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new methodOops.
      // also does loader constraint checking
      if (!this_oop()->is_shared()) {
        ResourceMark rm(THREAD);
        this_oop->vtable()->initialize_vtable(true, CHECK_false);
        this_oop->itable()->initialize_itable(true, CHECK_false);
      }
#ifdef ASSERT
      else {
        ResourceMark rm(THREAD);
        this_oop->vtable()->verify(tty, true);
        // In case itable verification is ever added.
        // this_oop->itable()->verify(tty, true);
      }
#endif
      this_oop->set_init_state(linked);
      if (JvmtiExport::should_post_class_prepare()) {
        Thread *thread = THREAD;
        assert(thread->is_Java_thread(), "thread->is_Java_thread()");
        JvmtiExport::post_class_prepare((JavaThread *) thread, this_oop());
      }
    }
  }
  return true;
}


// Rewrite the byte codes of all of the methods of a class.
// Three cases:
//    During the link of a newly loaded class.
//    During the preloading of classes to be written to the shared spaces.
//      - Rewrite the methods and update the method entry points.
//
//    During the link of a class in the shared spaces.
//      - The methods were already rewritten, update the method entry points.
//
// The rewriter must be called exactly once. Rewriting must happen after
// verification but before the first method of the class is executed.

void instanceKlass::rewrite_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  instanceKlassHandle this_oop(THREAD, this->as_klassOop());
  if (this_oop->is_rewritten()) {
    assert(this_oop()->is_shared(), "rewriting an unshared class?");
    return;
  }
  Rewriter::rewrite(this_oop, CHECK); // No exception can happen here
  this_oop->set_rewritten();
}


void instanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  this_oop->link_class(CHECK);

  // refer to the JVM book page 47 for description of steps
  // Step 1
  { ObjectLocker ol(this_oop, THREAD);

    Thread *self = THREAD; // it's passed the current thread

    // Step 2
    // If we were to use wait() instead of waitUninterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw.  This would wreak havoc.  See 6320309.
    while(this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self)) {
      ol.waitUninterruptibly(CHECK);
    }

    // Step 3
    if (this_oop->is_being_initialized() && this_oop->is_reentrant_initialization(self))
      return;

    // Step 4
    if (this_oop->is_initialized())
      return;

    // Step 5
    if (this_oop->is_in_error_state()) {
      ResourceMark rm(THREAD);
      const char* desc = "Could not initialize class ";
      const char* className = this_oop->external_name();
      size_t msglen = strlen(desc) + strlen(className) + 1;
      char* message = NEW_C_HEAP_ARRAY(char, msglen);
      if (NULL == message) {
        // Out of memory: can't create detailed error message
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
        jio_snprintf(message, msglen, "%s%s", desc, className);
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }

    // Step 6
    this_oop->set_init_state(being_initialized);
    this_oop->set_init_thread(self);
  }

  // Step 7
  klassOop super_klass = this_oop->super();
  if (super_klass != NULL && !this_oop->is_interface() && Klass::cast(super_klass)->should_be_initialized()) {
    Klass::cast(super_klass)->initialize(THREAD);

    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        this_oop->set_initialization_state_and_notify(initialization_error, THREAD); // Locks object, set state, and notify all waiting threads
        CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, superclass initialization error is thrown below
      }
      THROW_OOP(e());
    }
  }

  // Step 8
  {
    assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
    JavaThread* jt = (JavaThread*)THREAD;
    // Timer includes any side effects of class initialization (resolution,
    // etc), but not recursive entry into call_class_initializer().
    PerfTraceTimedEvent timer(ClassLoader::perf_class_init_time(),
                              ClassLoader::perf_classes_inited(),
                              jt->get_thread_stat()->class_init_recursion_count_addr());
    this_oop->call_class_initializer(THREAD);
  }

  // Step 9
  if (!HAS_PENDING_EXCEPTION) {
    this_oop->set_initialization_state_and_notify(fully_initialized, CHECK);
    { ResourceMark rm(THREAD);
      debug_only(this_oop->vtable()->verify(tty, true);)
    }
  }
  else {
    // Step 10 and 11
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    {
      EXCEPTION_MARK;
      this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
    }
    if (e->is_a(SystemDictionary::error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbolHandles::java_lang_ExceptionInInitializerError(),
                vmSymbolHandles::throwable_void_signature(),
                &args);
    }
  }
}
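
// For reference, a sketch of the state machine the numbered steps walk
// through (matching the ClassState values used in this file; the enum itself
// is declared in the header):
//
//   loaded -> linked -> being_initialized -> fully_initialized
//                             |
//                             +-> initialization_error   (Steps 10 and 11)
//
// set_initialization_state_and_notify() below performs the transition under
// the init lock and wakes any threads parked in Step 2.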


// Note: implementation moved to static method to expose the this pointer.
void instanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  instanceKlassHandle kh(THREAD, this->as_klassOop());
  set_initialization_state_and_notify_impl(kh, state, CHECK);
}

void instanceKlass::set_initialization_state_and_notify_impl(instanceKlassHandle this_oop, ClassState state, TRAPS) {
  ObjectLocker ol(this_oop, THREAD);
  this_oop->set_init_state(state);
  ol.notify_all(CHECK);
}

void instanceKlass::add_implementor(klassOop k) {
  assert(Compile_lock->owned_by_self(), "");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (instanceKlass::cast(k)->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  klassOop sk = instanceKlass::cast(k)->super();
  if (sk != NULL && instanceKlass::cast(sk)->implements_interface(as_klassOop()))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  // Update number of implementors
  int i = _nof_implementors++;

  // Record this implementor, if there are not too many already
  if (i < implementors_limit) {
    assert(_implementors[i] == NULL, "should be exactly one implementor");
    oop_store_without_check((oop*)&_implementors[i], k);
  } else if (i == implementors_limit) {
    // clear out the list on first overflow
    for (int i2 = 0; i2 < implementors_limit; i2++)
      oop_store_without_check((oop*)&_implementors[i2], NULL);
  }

  // The implementor also implements the transitive_interfaces
  for (int index = 0; index < local_interfaces()->length(); index++) {
    instanceKlass::cast(klassOop(local_interfaces()->obj_at(index)))->add_implementor(k);
  }
}
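
// Illustration of the bookkeeping above (limit value assumed for exposition):
// with implementors_limit == 2, the first two concrete implementors land in
// _implementors[0..1]; registering a third makes _nof_implementors == 3 and
// wipes the recorded list, so CHA can only tell "more than the limit" and
// must treat the interface conservatively. _nof_implementors keeps counting
// past the limit either way.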

void instanceKlass::init_implementor() {
  for (int i = 0; i < implementors_limit; i++)
    oop_store_without_check((oop*)&_implementors[i], NULL);
  _nof_implementors = 0;
}


void instanceKlass::process_interfaces(Thread *thread) {
  // link this class into the implementors list of every interface it implements
  KlassHandle this_as_oop (thread, this->as_klassOop());
  for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
    assert(local_interfaces()->obj_at(i)->is_klass(), "must be a klass");
    instanceKlass* interf = instanceKlass::cast(klassOop(local_interfaces()->obj_at(i)));
    assert(interf->is_interface(), "expected interface");
    interf->add_implementor(this_as_oop());
  }
}

bool instanceKlass::can_be_primary_super_slow() const {
  if (is_interface())
    return false;
  else
    return Klass::can_be_primary_super_slow();
}

objArrayOop instanceKlass::compute_secondary_supers(int num_extra_slots, TRAPS) {
  // The secondaries are the implemented interfaces.
  instanceKlass* ik = instanceKlass::cast(as_klassOop());
  objArrayHandle interfaces (THREAD, ik->transitive_interfaces());
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    return Universe::the_empty_system_obj_array();
  } else if (num_extra_slots == 0) {
    return interfaces();
  } else {
    // a mix of both
    objArrayOop secondaries = oopFactory::new_system_objArray(num_secondaries, CHECK_NULL);
    for (int i = 0; i < interfaces->length(); i++) {
      secondaries->obj_at_put(num_extra_slots+i, interfaces->obj_at(i));
    }
    return secondaries;
  }
}
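
// Resulting array shape for the mixed case (sketch): with num_extra_slots == 2
// and transitive interfaces {I, J},
//
//   secondaries = [ NULL, NULL, I, J ]
//
// i.e. the extra slots stay empty here for the caller to fill, and the
// interfaces always occupy the tail of the array.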

bool instanceKlass::compute_is_subtype_of(klassOop k) {
  if (Klass::cast(k)->is_interface()) {
    return implements_interface(k);
  } else {
    return Klass::compute_is_subtype_of(k);
  }
}

bool instanceKlass::implements_interface(klassOop k) const {
  if (as_klassOop() == k) return true;
  assert(Klass::cast(k)->is_interface(), "should be an interface class");
  for (int i = 0; i < transitive_interfaces()->length(); i++) {
    if (transitive_interfaces()->obj_at(i) == k) {
      return true;
    }
  }
  return false;
}

objArrayOop instanceKlass::allocate_objArray(int n, int length, TRAPS) {
  if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
  if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
    THROW_OOP_0(Universe::out_of_memory_error_array_size());
  }
  int size = objArrayOopDesc::object_size(length);
  klassOop ak = array_klass(n, CHECK_NULL);
  KlassHandle h_ak (THREAD, ak);
  objArrayOop o =
    (objArrayOop)CollectedHeap::array_allocate(h_ak, size, length, CHECK_NULL);
  return o;
}

instanceOop instanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", (address)i);
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oops to be passed as handles (jobjects)
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh (THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  return h_i();
}

instanceOop instanceKlass::allocate_instance(TRAPS) {
  bool has_finalizer_flag = has_finalizer(); // Query before possible GC
  int size = size_helper();  // Query before forming handle.

  KlassHandle h_k(THREAD, as_klassOop());

  instanceOop i;

  i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);
  if (has_finalizer_flag && !RegisterFinalizersAtInit) {
    i = register_finalizer(i, CHECK_NULL);
  }
  return i;
}
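
// Note on the ordering above (a sketch of the hazard being avoided): both
// queries run before the handle is formed because obj_allocate() may trigger
// a GC, so nothing may be read off the raw klass afterwards. A hypothetical
// caller:
//
//   instanceOop obj = instanceKlass::cast(k)->allocate_instance(CHECK_NULL);
//
// gets back an object that is already registered as finalizable unless
// RegisterFinalizersAtInit defers that work to the Object.<init> constructor.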

instanceOop instanceKlass::allocate_permanent_instance(TRAPS) {
  // Finalizer registration occurs in the Object.<init> constructor
  // and constructors normally aren't run when allocating perm
  // instances so simply disallow finalizable perm objects.  This can
  // be relaxed if a need for it is found.
  assert(!has_finalizer(), "perm objects not allowed to have finalizers");
  int size = size_helper();  // Query before forming handle.
  KlassHandle h_k(THREAD, as_klassOop());
  instanceOop i = (instanceOop)
    CollectedHeap::permanent_obj_allocate(h_k, size, CHECK_NULL);
  return i;
}

void instanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
  if (is_interface() || is_abstract()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
              : vmSymbols::java_lang_InstantiationException(), external_name());
  }
  if (as_klassOop() == SystemDictionary::class_klass()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
              : vmSymbols::java_lang_IllegalAccessException(), external_name());
  }
}

klassOop instanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
  instanceKlassHandle this_oop(THREAD, as_klassOop());
  return array_klass_impl(this_oop, or_null, n, THREAD);
}

klassOop instanceKlass::array_klass_impl(instanceKlassHandle this_oop, bool or_null, int n, TRAPS) {
  if (this_oop->array_klasses() == NULL) {
    if (or_null) return NULL;

    ResourceMark rm;
    JavaThread *jt = (JavaThread *)THREAD;
    {
      // Atomic creation of array_klasses
      MutexLocker mc(Compile_lock, THREAD);   // for vtables
      MutexLocker ma(MultiArray_lock, THREAD);

      // Check if update has already taken place
      if (this_oop->array_klasses() == NULL) {
        objArrayKlassKlass* oakk =
          (objArrayKlassKlass*)Universe::objArrayKlassKlassObj()->klass_part();

        klassOop  k = oakk->allocate_objArray_klass(1, this_oop, CHECK_NULL);
        this_oop->set_array_klasses(k);
      }
    }
  }
  // array_klasses() will always be set at this point
  objArrayKlass* oak = (objArrayKlass*)this_oop->array_klasses()->klass_part();
  if (or_null) {
    return oak->array_klass_or_null(n);
  }
  return oak->array_klass(n, CHECK_NULL);
}
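
// The shape above is the usual check/lock/re-check idiom, reduced to its
// essentials (sketch):
//
//   if (field == NULL) {          // unsynchronized fast-path test
//     MutexLocker ml(Some_lock);  // serialize creators
//     if (field == NULL) {        // re-test: another thread may have won
//       field = create();
//     }
//   }
//
// mask_for() below uses the same pattern for the oop map cache.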

klassOop instanceKlass::array_klass_impl(bool or_null, TRAPS) {
  return array_klass_impl(or_null, 1, THREAD);
}

void instanceKlass::call_class_initializer(TRAPS) {
  instanceKlassHandle ik (THREAD, as_klassOop());
  call_class_initializer_impl(ik, THREAD);
}

static int call_class_initializer_impl_counter = 0;   // for debugging

methodOop instanceKlass::class_initializer() {
  return find_method(vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
}

void instanceKlass::call_class_initializer_impl(instanceKlassHandle this_oop, TRAPS) {
  methodHandle h_method(THREAD, this_oop->class_initializer());
  assert(!this_oop->is_initialized(), "we cannot initialize twice");
  if (TraceClassInitialization) {
    tty->print("%d Initializing ", call_class_initializer_impl_counter++);
    this_oop->name()->print_value();
    tty->print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", (address)this_oop());
  }
  if (h_method() != NULL) {
    JavaCallArguments args; // No arguments
    JavaValue result(T_VOID);
    JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
  }
}


void instanceKlass::mask_for(methodHandle method, int bci,
  InterpreterOopMap* entry_for) {
  // Dirty read, then double-check under a lock.
  if (_oop_map_cache == NULL) {
    // Otherwise, allocate a new one.
    MutexLocker x(OopMapCacheAlloc_lock);
    // First time use. Allocate a cache in C heap
    if (_oop_map_cache == NULL) {
      _oop_map_cache = new OopMapCache();
    }
  }
  // _oop_map_cache is constant after init; lookup below does its own locking.
  _oop_map_cache->lookup(method, bci, entry_for);
}


bool instanceKlass::find_local_field(symbolOop name, symbolOop sig, fieldDescriptor* fd) const {
  const int n = fields()->length();
  for (int i = 0; i < n; i += next_offset ) {
    int name_index = fields()->ushort_at(i + name_index_offset);
    int sig_index  = fields()->ushort_at(i + signature_index_offset);
    symbolOop f_name = constants()->symbol_at(name_index);
    symbolOop f_sig  = constants()->symbol_at(sig_index);
    if (f_name == name && f_sig == sig) {
      fd->initialize(as_klassOop(), i);
      return true;
    }
  }
  return false;
}
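
// Layout being walked above (a sketch; the *_offset constants are declared in
// the header): fields() is a flat array of shorts holding one fixed-size
// record per declared field, which is why the loop strides by next_offset:
//
//   [ <field 0: ..., name_index, signature_index, ...> | <field 1: ...> | ... ]
//
// name_index and signature_index are constant-pool indices, so after
// symbol_at() the match can be a cheap symbolOop identity comparison.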


void instanceKlass::field_names_and_sigs_iterate(OopClosure* closure) {
  const int n = fields()->length();
  for (int i = 0; i < n; i += next_offset ) {
    int name_index = fields()->ushort_at(i + name_index_offset);
    symbolOop name = constants()->symbol_at(name_index);
    closure->do_oop((oop*)&name);

    int sig_index  = fields()->ushort_at(i + signature_index_offset);
    symbolOop sig = constants()->symbol_at(sig_index);
    closure->do_oop((oop*)&sig);
  }
}


klassOop instanceKlass::find_interface_field(symbolOop name, symbolOop sig, fieldDescriptor* fd) const {
  const int n = local_interfaces()->length();
  for (int i = 0; i < n; i++) {
    klassOop intf1 = klassOop(local_interfaces()->obj_at(i));
    assert(Klass::cast(intf1)->is_interface(), "just checking type");
    // search for field in current interface
    if (instanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
      assert(fd->is_static(), "interface field must be static");
      return intf1;
    }
    // search for field in direct superinterfaces
    klassOop intf2 = instanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
    if (intf2 != NULL) return intf2;
  }
  // otherwise field lookup fails
  return NULL;
}


klassOop instanceKlass::find_field(symbolOop name, symbolOop sig, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    return as_klassOop();
  }
  // 2) search for field recursively in direct superinterfaces
  { klassOop intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { klassOop supr = super();
    if (supr != NULL) return instanceKlass::cast(supr)->find_field(name, sig, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


klassOop instanceKlass::find_field(symbolOop name, symbolOop sig, bool is_static, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    if (fd->is_static() == is_static) return as_klassOop();
  }
  // 2) search for field recursively in direct superinterfaces
  if (is_static) {
    klassOop intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { klassOop supr = super();
    if (supr != NULL) return instanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}
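
// Worked example of the JVMS 5.4.3.2 order implemented above (hypothetical
// Java declarations, for illustration):
//
//   interface I { int X = 1; }          // static field in a superinterface
//   class A    { int X; }               // instance field in the superclass
//   class B extends A implements I { }
//
// find_field(X) on B checks B itself (step 1), then the superinterface I
// (step 2), then the superclass A (step 3), so it resolves to I's static X
// rather than A's instance field.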


bool instanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  int length = fields()->length();
  for (int i = 0; i < length; i += next_offset) {
    if (offset_from_fields( i ) == offset) {
      fd->initialize(as_klassOop(), i);
      if (fd->is_static() == is_static) return true;
    }
  }
  return false;
}


bool instanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  klassOop klass = as_klassOop();
  while (klass != NULL) {
    if (instanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
      return true;
    }
    klass = Klass::cast(klass)->super();
  }
  return false;
}


void instanceKlass::methods_do(void f(methodOop method)) {
  int len = methods()->length();
  for (int index = 0; index < len; index++) {
    methodOop m = methodOop(methods()->obj_at(index));
    assert(m->is_method(), "must be method");
    f(m);
  }
}

void instanceKlass::do_local_static_fields(FieldClosure* cl) {
  fieldDescriptor fd;
  int length = fields()->length();
  for (int i = 0; i < length; i += next_offset) {
    fd.initialize(as_klassOop(), i);
    if (fd.is_static()) cl->do_field(&fd);
  }
}


void instanceKlass::do_local_static_fields(void f(fieldDescriptor*, TRAPS), TRAPS) {
  instanceKlassHandle h_this(THREAD, as_klassOop());
  do_local_static_fields_impl(h_this, f, CHECK);
}


void instanceKlass::do_local_static_fields_impl(instanceKlassHandle this_oop, void f(fieldDescriptor* fd, TRAPS), TRAPS) {
  fieldDescriptor fd;
  int length = this_oop->fields()->length();
  for (int i = 0; i < length; i += next_offset) {
    fd.initialize(this_oop(), i);
    if (fd.is_static()) { f(&fd, CHECK); } // Do NOT remove {}! (CHECK macro expands into several statements)
  }
}


static int compare_fields_by_offset(int* a, int* b) {
  return a[0] - b[0];
}

void instanceKlass::do_nonstatic_fields(FieldClosure* cl) {
  instanceKlass* super = superklass();
  if (super != NULL) {
    super->do_nonstatic_fields(cl);
  }
  fieldDescriptor fd;
  int length = fields()->length();
  // In DebugInfo nonstatic fields are sorted by offset.
  int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1));
  int j = 0;
  for (int i = 0; i < length; i += next_offset) {
    fd.initialize(as_klassOop(), i);
    if (!fd.is_static()) {
      fields_sorted[j + 0] = fd.offset();
      fields_sorted[j + 1] = i;
      j += 2;
    }
  }
  if (j > 0) {
    length = j;
    // _sort_Fn is defined in growableArray.hpp.
    qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
    for (int i = 0; i < length; i += 2) {
      fd.initialize(as_klassOop(), fields_sorted[i + 1]);
      assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
      cl->do_field(&fd);
    }
  }
  FREE_C_HEAP_ARRAY(int, fields_sorted);
}


void instanceKlass::array_klasses_do(void f(klassOop k)) {
  if (array_klasses() != NULL)
    arrayKlass::cast(array_klasses())->array_klasses_do(f);
}


void instanceKlass::with_array_klasses_do(void f(klassOop k)) {
  f(as_klassOop());
  array_klasses_do(f);
}

#ifdef ASSERT
static int linear_search(objArrayOop methods, symbolOop name, symbolOop signature) {
  int len = methods->length();
  for (int index = 0; index < len; index++) {
    methodOop m = (methodOop)(methods->obj_at(index));
    assert(m->is_method(), "must be method");
    if (m->signature() == signature && m->name() == name) {
       return index;
    }
  }
  return -1;
}
#endif

methodOop instanceKlass::find_method(symbolOop name, symbolOop signature) const {
  return instanceKlass::find_method(methods(), name, signature);
}

methodOop instanceKlass::find_method(objArrayOop methods, symbolOop name, symbolOop signature) {
  int len = methods->length();
  // methods are sorted, so do binary search
  int l = 0;
  int h = len - 1;
  while (l <= h) {
    int mid = (l + h) >> 1;
    methodOop m = (methodOop)methods->obj_at(mid);
    assert(m->is_method(), "must be method");
    int res = m->name()->fast_compare(name);
    if (res == 0) {
      // found matching name; do linear search to find matching signature
      // first, quick check for common case
      if (m->signature() == signature) return m;
      // search downwards through overloaded methods
      int i;
      for (i = mid - 1; i >= l; i--) {
        methodOop m = (methodOop)methods->obj_at(i);
        assert(m->is_method(), "must be method");
        if (m->name() != name) break;
        if (m->signature() == signature) return m;
      }
      // search upwards
      for (i = mid + 1; i <= h; i++) {
        methodOop m = (methodOop)methods->obj_at(i);
        assert(m->is_method(), "must be method");
        if (m->name() != name) break;
        if (m->signature() == signature) return m;
      }
      // not found
#ifdef ASSERT
      int index = linear_search(methods, name, signature);
      if (index != -1) fatal1("binary search bug: should have found entry %d", index);
#endif
      return NULL;
    } else if (res < 0) {
      l = mid + 1;
    } else {
      h = mid - 1;
    }
  }
#ifdef ASSERT
  int index = linear_search(methods, name, signature);
  if (index != -1) fatal1("binary search bug: should have found entry %d", index);
#endif
  return NULL;
}
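
// Worked example of the search above (hypothetical method table, sorted by
// name): methods = [ foo()V, get()I, get(I)I, get(J)J, run()V ]. Looking up
// get(J)J, the binary search lands on some entry named "get"; if its
// signature is not the one wanted, the two linear scans sweep the contiguous
// run of same-name overloads on either side until the signature matches or
// the name changes.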

methodOop instanceKlass::uncached_lookup_method(symbolOop name, symbolOop signature) const {
  klassOop klass = as_klassOop();
  while (klass != NULL) {
    methodOop method = instanceKlass::cast(klass)->find_method(name, signature);
    if (method != NULL) return method;
    klass = instanceKlass::cast(klass)->super();
  }
  return NULL;
}

// lookup a method in all the interfaces that this class implements
methodOop instanceKlass::lookup_method_in_all_interfaces(symbolOop name,
                                                         symbolOop signature) const {
  objArrayOop all_ifs = instanceKlass::cast(as_klassOop())->transitive_interfaces();
  int num_ifs = all_ifs->length();
  instanceKlass *ik = NULL;
  for (int i = 0; i < num_ifs; i++) {
    ik = instanceKlass::cast(klassOop(all_ifs->obj_at(i)));
    methodOop m = ik->lookup_method(name, signature);
    if (m != NULL) {
      return m;
    }
  }
  return NULL;
}

/* jni_id_for_impl for jfieldIds only */
JNIid* instanceKlass::jni_id_for_impl(instanceKlassHandle this_oop, int offset) {
  MutexLocker ml(JfieldIdCreation_lock);
  // Retry lookup after we got the lock
  JNIid* probe = this_oop->jni_ids() == NULL ? NULL : this_oop->jni_ids()->find(offset);
  if (probe == NULL) {
    // Slow case, allocate new static field identifier
    probe = new JNIid(this_oop->as_klassOop(), offset, this_oop->jni_ids());
    this_oop->set_jni_ids(probe);
  }
  return probe;
}


/* jni_id_for for jfieldIds only */
JNIid* instanceKlass::jni_id_for(int offset) {
  JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
  if (probe == NULL) {
    probe = jni_id_for_impl(this->as_klassOop(), offset);
  }
  return probe;
}


// Lookup or create a jmethodID.
// This code can be called by the VM thread.  For this reason it is critical that
// there are no blocking operations (safepoints) while the lock is held -- or a
// deadlock can occur.
jmethodID instanceKlass::jmethod_id_for_impl(instanceKlassHandle ik_h, methodHandle method_h) {
  size_t idnum = (size_t)method_h->method_idnum();
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  size_t length = 0;
  jmethodID id = NULL;
  // array length stored in first element, other elements offset by one
  if (jmeths == NULL ||                         // If there is no jmethodID array,
      (length = (size_t)jmeths[0]) <= idnum ||  // or if it is too short,
      (id = jmeths[idnum+1]) == NULL) {         // or if this jmethodID isn't allocated

    // Do all the safepointing things (allocations) before grabbing the lock.
    // These allocations will have to be freed if they are unused.

    // Allocate a new array of methods.
    jmethodID* new_jmeths = NULL;
    if (length <= idnum) {
      // A new array will be needed (unless some other thread beats us to it)
      size_t size = MAX2(idnum+1, (size_t)ik_h->idnum_allocated_count());
      new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1);
      memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
      new_jmeths[0] =(jmethodID)size;  // array size held in the first element
    }

    // Allocate a new method ID.
    jmethodID new_id = NULL;
    if (method_h->is_old() && !method_h->is_obsolete()) {
      // The method passed in is old (but not obsolete), we need to use the current version
      methodOop current_method = ik_h->method_with_idnum((int)idnum);
      assert(current_method != NULL, "old but not obsolete, so should exist");
      methodHandle current_method_h(current_method == NULL? method_h() : current_method);
      new_id = JNIHandles::make_jmethod_id(current_method_h);
    } else {
      // It is the current version of the method or an obsolete method,
      // use the version passed in
      new_id = JNIHandles::make_jmethod_id(method_h);
    }

    if (Threads::number_of_threads() == 0 || SafepointSynchronize::is_at_safepoint()) {
      // No need and unsafe to lock the JmethodIdCreation_lock at safepoint.
      id = get_jmethod_id(ik_h, idnum, new_id, new_jmeths);
    } else {
      MutexLocker ml(JmethodIdCreation_lock);
      id = get_jmethod_id(ik_h, idnum, new_id, new_jmeths);
    }
  }
  return id;
}
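
// Shape of the per-klass jmethodID cache manipulated here and in
// get_jmethod_id() below (sketch): a C-heap array whose element 0 smuggles in
// the capacity, with the id for method idnum i cached at element i+1:
//
//   jmeths: [ (jmethodID)length | id for idnum 0 | id for idnum 1 | ... ]
//
// NULL elements mean "not created yet", which is why get_jmethod_id()
// re-tests jmeths[idnum+1] under the lock before publishing a new id.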


jmethodID instanceKlass::get_jmethod_id(instanceKlassHandle ik_h, size_t idnum,
                                        jmethodID new_id, jmethodID* new_jmeths) {
  // Retry lookup after we got the lock or ensured we are at safepoint
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  jmethodID  id                = NULL;
  jmethodID  to_dealloc_id     = NULL;
  jmethodID* to_dealloc_jmeths = NULL;
  size_t     length;

  if (jmeths == NULL || (length = (size_t)jmeths[0]) <= idnum) {
    if (jmeths != NULL) {
      // We have grown the array: copy the existing entries, and delete the old array
      for (size_t index = 0; index < length; index++) {
        new_jmeths[index+1] = jmeths[index+1];
      }
      to_dealloc_jmeths = jmeths; // using the new jmeths, deallocate the old one
    }
    ik_h->release_set_methods_jmethod_ids(jmeths = new_jmeths);
  } else {
    id = jmeths[idnum+1];
    to_dealloc_jmeths = new_jmeths; // using the old jmeths, deallocate the new one
  }
  if (id == NULL) {
    id = new_id;
    jmeths[idnum+1] = id;  // install the new method ID
  } else {
    to_dealloc_id = new_id; // the new id wasn't used, mark it for deallocation
  }

  // Free up unneeded or no longer needed resources
  FreeHeap(to_dealloc_jmeths);
  if (to_dealloc_id != NULL) {
    JNIHandles::destroy_jmethod_id(to_dealloc_id);
  }
  return id;
}


// Lookup a jmethodID, NULL if not found.  Do no blocking, no allocations, no handles
jmethodID instanceKlass::jmethod_id_or_null(methodOop method) {
  size_t idnum = (size_t)method->method_idnum();
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  size_t length;                                // length assigned as debugging crumb
  jmethodID id = NULL;
  if (jmeths != NULL &&                         // If there is a jmethodID array,
      (length = (size_t)jmeths[0]) > idnum) {   // and if it is long enough,
    id = jmeths[idnum+1];                       // Look up the id (may be NULL)
  }
  return id;
}


// Cache an itable index
void instanceKlass::set_cached_itable_index(size_t idnum, int index) {
  int* indices = methods_cached_itable_indices_acquire();
  if (indices == NULL ||                         // If there is no index array,
      ((size_t)indices[0]) <= idnum) {           // or if it is too short
    // Lock before we allocate the array so we don't leak
    MutexLocker ml(JNICachedItableIndex_lock);
    // Retry lookup after we got the lock
    indices = methods_cached_itable_indices_acquire();
    size_t length = 0;
    // array length stored in first element, other elements offset by one
    if (indices == NULL || (length = (size_t)indices[0]) <= idnum) {
      size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count());
      int* new_indices = NEW_C_HEAP_ARRAY(int, size+1);
      // Copy the existing entries, if any
      size_t i;
      for (i = 0; i < length; i++) {
        new_indices[i+1] = indices[i+1];
      }
      // Set all the rest to -1
      for (i = length; i < size; i++) {
        new_indices[i+1] = -1;
      }
      if (indices != NULL) {
        FreeHeap(indices);  // delete any old indices
      }
      release_set_methods_cached_itable_indices(indices = new_indices);
    }
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  // This is a cache, if there is a race to set it, it doesn't matter
  indices[idnum+1] = index;
}


// Retrieve a cached itable index
int instanceKlass::cached_itable_index(size_t idnum) {
  int* indices = methods_cached_itable_indices_acquire();
  if (indices != NULL && ((size_t)indices[0]) > idnum) {
     // indices exist and are long enough, retrieve the possibly cached index
    return indices[idnum+1];
  }
  return -1;
}


//
// nmethodBucket is used to record dependent nmethods for
// deoptimization.  nmethod dependencies are actually <klass, method>
// pairs but we really only care about the klass part for purposes of
// finding nmethods which might need to be deoptimized.  Instead of
// recording the method, a count of how many times a particular nmethod
// was recorded is kept.  This ensures that any recording errors are
// noticed since an nmethod should be removed as many times as it's
// added.
//
class nmethodBucket {
 private:
  nmethod*       _nmethod;
  int            _count;
  nmethodBucket* _next;

 public:
  nmethodBucket(nmethod* nmethod, nmethodBucket* next) {
    _nmethod = nmethod;
    _next = next;
    _count = 1;
  }
  int count()                             { return _count; }
  int increment()                         { _count += 1; return _count; }
  int decrement()                         { _count -= 1; assert(_count >= 0, "don't underflow"); return _count; }
  nmethodBucket* next()                   { return _next; }
  void set_next(nmethodBucket* b)         { _next = b; }
  nmethod* get_nmethod()                  { return _nmethod; }
};
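
// The counting in action (sketch): an nmethod that records two dependencies
// on this klass is added twice, so its bucket reaches _count == 2; two
// matching remove_dependent_nmethod() calls bring it back to 0 and free the
// bucket. A third remove would fall off the list and hit ShouldNotReachHere()
// below, flagging a recording bug.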


//
// Walk the list of dependent nmethods searching for nmethods which
// are dependent on the klassOop that was passed in and mark them for
// deoptimization.  Returns the number of nmethods found.
//
int instanceKlass::mark_dependent_nmethods(DepChange& changes) {
  assert_locked_or_safepoint(CodeCache_lock);
  int found = 0;
  nmethodBucket* b = _dependencies;
  while (b != NULL) {
    nmethod* nm = b->get_nmethod();
    // since dependencies aren't removed until an nmethod becomes a zombie,
    // the dependency list may contain nmethods which aren't alive.
    if (nm->is_alive() && !nm->is_marked_for_deoptimization() && nm->check_dependency_on(changes)) {
      if (TraceDependencies) {
        ResourceMark rm;
        tty->print_cr("Marked for deoptimization");
        tty->print_cr("  context = %s", this->external_name());
        changes.print();
        nm->print();
        nm->print_dependencies();
      }
      nm->mark_for_deoptimization();
      found++;
    }
    b = b->next();
  }
  return found;
}


//
// Add an nmethodBucket to the list of dependencies for this nmethod.
// It's possible that an nmethod has multiple dependencies on this klass
// so a count is kept for each bucket to guarantee that creation and
// deletion of dependencies is consistent.
//
void instanceKlass::add_dependent_nmethod(nmethod* nm) {
  assert_locked_or_safepoint(CodeCache_lock);
  nmethodBucket* b = _dependencies;
  nmethodBucket* last = NULL;
  while (b != NULL) {
    if (nm == b->get_nmethod()) {
      b->increment();
      return;
    }
    b = b->next();
  }
  _dependencies = new nmethodBucket(nm, _dependencies);
}


//
// Decrement count of the nmethod in the dependency list and remove
// the bucket completely when the count goes to 0.  This method must
// find a corresponding bucket otherwise there's a bug in the
// recording of dependencies.
//
void instanceKlass::remove_dependent_nmethod(nmethod* nm) {
  assert_locked_or_safepoint(CodeCache_lock);
  nmethodBucket* b = _dependencies;
  nmethodBucket* last = NULL;
  while (b != NULL) {
    if (nm == b->get_nmethod()) {
      if (b->decrement() == 0) {
        if (last == NULL) {
          _dependencies = b->next();
        } else {
          last->set_next(b->next());
        }
        delete b;
      }
      return;
    }
    last = b;
    b = b->next();
  }
#ifdef ASSERT
  tty->print_cr("### %s can't find dependent nmethod:", this->external_name());
  nm->print();
#endif // ASSERT
  ShouldNotReachHere();
}


#ifndef PRODUCT
void instanceKlass::print_dependent_nmethods(bool verbose) {
  nmethodBucket* b = _dependencies;
  int idx = 0;
  while (b != NULL) {
    nmethod* nm = b->get_nmethod();
    tty->print("[%d] count=%d { ", idx++, b->count());
    if (!verbose) {
      nm->print_on(tty, "nmethod");
      tty->print_cr(" } ");
    } else {
      nm->print();
      nm->print_dependencies();
      tty->print_cr("--- } ");
    }
    b = b->next();
  }
}


bool instanceKlass::is_dependent_nmethod(nmethod* nm) {
  nmethodBucket* b = _dependencies;
  while (b != NULL) {
    if (nm == b->get_nmethod()) {
      return true;
    }
    b = b->next();
  }
  return false;
}
#endif //PRODUCT


#ifdef ASSERT
template <class T> void assert_is_in(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in(o), "should be in heap");
  }
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_closed_subset(o), "should be in closed");
  }
}
template <class T> void assert_is_in_reserved(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
  }
}
template <class T> void assert_nothing(T *p) {}

#else
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT

//
// Macros that iterate over areas of oops which are specialized on type of
// oop pointer either narrow or wide, depending on UseCompressedOops
//
// Parameters are:
//   T         - type of oop to point to (either oop or narrowOop)
//   start_p   - starting pointer for region to iterate over
//   count     - number of oops or narrowOops to iterate over
//   do_oop    - action to perform on each oop (it's arbitrary C code which
//               makes it more efficient to put in a macro rather than making
//               it a template function)
//   assert_fn - assert function which is template function because performance
//               doesn't matter when enabled.
#define InstanceKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                \
  assert_fn)                                \
{                                           \
  T* p         = (T*)(start_p);             \
  T* const end = p + (count);               \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}
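
// For orientation, a hand-written expansion sketch (not generated output):
// with T = oop, do_oop = MarkSweep::adjust_pointer(p) and
// assert_fn = assert_nothing, the macro above expands to roughly
//
//   oop* p         = (oop*)(start_p);
//   oop* const end = p + (count);
//   while (p < end) {
//     (assert_nothing)(p);
//     MarkSweep::adjust_pointer(p);
//     ++p;
//   }
//
// i.e. a plain typed pointer walk; `p` is the name that the do_oop fragments
// used elsewhere in this file rely on.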
1314
1315#define InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE( \
1316  T, start_p, count, do_oop,                \
1317  assert_fn)                                \
1318{                                           \
1319  T* const start = (T*)(start_p);           \
1320  T*       p     = start + (count);         \
1321  while (start < p) {                       \
1322    --p;                                    \
1323    (assert_fn)(p);                         \
1324    do_oop;                                 \
1325  }                                         \
1326}
1327
1328#define InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
1329  T, start_p, count, low, high,             \
1330  do_oop, assert_fn)                        \
1331{                                           \
1332  T* const l = (T*)(low);                   \
1333  T* const h = (T*)(high);                  \
1334  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
1335         mask_bits((intptr_t)h, sizeof(T)-1) == 0,   \
1336         "bounded region must be properly aligned"); \
1337  T* p       = (T*)(start_p);               \
1338  T* end     = p + (count);                 \
1339  if (p < l) p = l;                         \
1340  if (end > h) end = h;                     \
1341  while (p < end) {                         \
1342    (assert_fn)(p);                         \
1343    do_oop;                                 \
1344    ++p;                                    \
1345  }                                         \
1346}
1347
1348
1349// The following macros call specialized macros, passing either oop or
1350// narrowOop as the specialization type.  These test the UseCompressedOops
1351// flag.
1352#define InstanceKlass_OOP_ITERATE(start_p, count,    \
1353                                  do_oop, assert_fn) \
1354{                                                    \
1355  if (UseCompressedOops) {                           \
1356    InstanceKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
1357      start_p, count,                                \
1358      do_oop, assert_fn)                             \
1359  } else {                                           \
1360    InstanceKlass_SPECIALIZED_OOP_ITERATE(oop,       \
1361      start_p, count,                                \
1362      do_oop, assert_fn)                             \
1363  }                                                  \
1364}
1365
1366#define InstanceKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high,    \
1367                                          do_oop, assert_fn) \
1368{                                                            \
1369  if (UseCompressedOops) {                                   \
1370    InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop, \
1371      start_p, count,                                        \
1372      low, high,                                             \
1373      do_oop, assert_fn)                                     \
1374  } else {                                                   \
1375    InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,       \
1376      start_p, count,                                        \
1377      low, high,                                             \
1378      do_oop, assert_fn)                                     \
1379  }                                                          \
1380}
1381
1382#define InstanceKlass_OOP_MAP_ITERATE(obj, do_oop, assert_fn)            \
1383{                                                                        \
1384  /* Compute oopmap block range. The common case                         \
1385     is nonstatic_oop_map_size == 1. */                                  \
1386  OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
1387  OopMapBlock* const end_map = map + nonstatic_oop_map_size();           \
1388  if (UseCompressedOops) {                                               \
1389    while (map < end_map) {                                              \
1390      InstanceKlass_SPECIALIZED_OOP_ITERATE(narrowOop,                   \
1391        obj->obj_field_addr<narrowOop>(map->offset()), map->length(),    \
1392        do_oop, assert_fn)                                               \
1393      ++map;                                                             \
1394    }                                                                    \
1395  } else {                                                               \
1396    while (map < end_map) {                                              \
1397      InstanceKlass_SPECIALIZED_OOP_ITERATE(oop,                         \
1398        obj->obj_field_addr<oop>(map->offset()), map->length(),          \
1399        do_oop, assert_fn)                                               \
1400      ++map;                                                             \
1401    }                                                                    \
1402  }                                                                      \
1403}
1404
#define InstanceKlass_OOP_MAP_REVERSE_ITERATE(obj, do_oop, assert_fn)    \
{                                                                        \
  OopMapBlock* const start_map = start_of_nonstatic_oop_maps();          \
  OopMapBlock* map             = start_map + nonstatic_oop_map_size();   \
  if (UseCompressedOops) {                                               \
    while (start_map < map) {                                            \
      --map;                                                             \
      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(narrowOop,           \
        obj->obj_field_addr<narrowOop>(map->offset()), map->length(),    \
        do_oop, assert_fn)                                               \
    }                                                                    \
  } else {                                                               \
    while (start_map < map) {                                            \
      --map;                                                             \
      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(oop,                 \
        obj->obj_field_addr<oop>(map->offset()), map->length(),          \
        do_oop, assert_fn)                                               \
    }                                                                    \
  }                                                                      \
}

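// Reverse iteration walks the oop map blocks from the last one back to the
// first (and, via the REVERSE specialization, presumably each block's fields
// in descending address order as well). The parallel-scavenge promotion
// paths below (oop_copy_contents / oop_push_contents) use this ordering.
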
#define InstanceKlass_BOUNDED_OOP_MAP_ITERATE(obj, low, high, do_oop,    \
                                              assert_fn)                 \
{                                                                        \
  /* Compute oopmap block range. The common case is                      \
     nonstatic_oop_map_size == 1, so we accept the                       \
     usually non-existent extra overhead of examining                    \
     all the maps. */                                                    \
  OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
  OopMapBlock* const end_map = map + nonstatic_oop_map_size();           \
  if (UseCompressedOops) {                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,           \
        obj->obj_field_addr<narrowOop>(map->offset()), map->length(),    \
        low, high,                                                       \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  } else {                                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,                 \
        obj->obj_field_addr<oop>(map->offset()), map->length(),          \
        low, high,                                                       \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  }                                                                      \
}

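// The bounded variant differs only in that each field address is also
// checked against the low/high bounds, so do_oop is applied just to fields
// falling inside that address range; oop_oop_iterate##nv_suffix##_m() below
// relies on this to confine iteration to a MemRegion.
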
void instanceKlass::follow_static_fields() {
  InstanceKlass_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    MarkSweep::mark_and_push(p), \
    assert_is_in_closed_subset)
}

#ifndef SERIALGC
void instanceKlass::follow_static_fields(ParCompactionManager* cm) {
  InstanceKlass_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    PSParallelCompact::mark_and_push(cm, p), \
    assert_is_in)
}
#endif // SERIALGC

void instanceKlass::adjust_static_fields() {
  InstanceKlass_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    MarkSweep::adjust_pointer(p), \
    assert_nothing)
}

#ifndef SERIALGC
void instanceKlass::update_static_fields() {
  InstanceKlass_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    PSParallelCompact::adjust_pointer(p), \
    assert_nothing)
}

void instanceKlass::update_static_fields(HeapWord* beg_addr, HeapWord* end_addr) {
  InstanceKlass_BOUNDED_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    beg_addr, end_addr, \
    PSParallelCompact::adjust_pointer(p), \
    assert_nothing )
}
#endif // SERIALGC

void instanceKlass::oop_follow_contents(oop obj) {
  assert(obj != NULL, "can't follow the content of NULL object");
  obj->follow_header();
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    MarkSweep::mark_and_push(p), \
    assert_is_in_closed_subset)
}

#ifndef SERIALGC
void instanceKlass::oop_follow_contents(ParCompactionManager* cm,
                                        oop obj) {
  assert(obj != NULL, "can't follow the content of NULL object");
  obj->follow_header(cm);
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    PSParallelCompact::mark_and_push(cm, p), \
    assert_is_in)
}
#endif // SERIALGC

// The closure's do_header() method dictates whether the given closure should be
// applied to the klass ptr in the object header.

#define InstanceKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                             \
int instanceKlass::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
  /* header */                                                          \
  if (closure->do_header()) {                                           \
    obj->oop_iterate_header(closure);                                   \
  }                                                                     \
  InstanceKlass_OOP_MAP_ITERATE(                                        \
    obj,                                                                \
    SpecializationStats::                                               \
      record_do_oop_call##nv_suffix(SpecializationStats::ik);           \
    (closure)->do_oop##nv_suffix(p),                                    \
    assert_is_in_closed_subset)                                         \
  return size_helper();                                                 \
}

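// Sketch of the expansion (illustrative only): for a hypothetical closure
// type SomeClosure with non-virtual suffix _nv, this macro stamps out
//
//   int instanceKlass::oop_oop_iterate_nv(oop obj, SomeClosure* closure);
//
// in which do_oop_nv(p) can be statically bound, while the empty-suffix
// variant goes through the virtual do_oop(p).
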
#ifndef SERIALGC
#define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int instanceKlass::oop_oop_iterate_backwards##nv_suffix(oop obj,                \
                                              OopClosureType* closure) {        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik); \
  /* header */                                                                  \
  if (closure->do_header()) {                                                   \
    obj->oop_iterate_header(closure);                                           \
  }                                                                             \
  /* instance variables */                                                      \
  InstanceKlass_OOP_MAP_REVERSE_ITERATE(                                        \
    obj,                                                                        \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::ik);\
    (closure)->do_oop##nv_suffix(p),                                            \
    assert_is_in_closed_subset)                                                 \
  return size_helper();                                                         \
}
#endif // !SERIALGC

#define InstanceKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix) \
                                                                        \
int instanceKlass::oop_oop_iterate##nv_suffix##_m(oop obj,              \
                                                  OopClosureType* closure, \
                                                  MemRegion mr) {          \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
  if (closure->do_header()) {                                            \
    obj->oop_iterate_header(closure, mr);                                \
  }                                                                      \
  InstanceKlass_BOUNDED_OOP_MAP_ITERATE(                                 \
    obj, mr.start(), mr.end(),                                           \
    (closure)->do_oop##nv_suffix(p),                                     \
    assert_is_in_closed_subset)                                          \
  return size_helper();                                                  \
}

ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
#ifndef SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // !SERIALGC

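// The ALL_OOP_OOP_ITERATE_CLOSURES_1/2 macros (defined with the closure
// types elsewhere) invoke the definition macros above once per known
// closure type, generating the full family of specialized iterate methods.
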
void instanceKlass::iterate_static_fields(OopClosure* closure) {
  InstanceKlass_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    closure->do_oop(p), \
    assert_is_in_reserved)
}

void instanceKlass::iterate_static_fields(OopClosure* closure,
                                          MemRegion mr) {
  InstanceKlass_BOUNDED_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    mr.start(), mr.end(), \
    (closure)->do_oop_v(p), \
    assert_is_in_closed_subset)
}

int instanceKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    MarkSweep::adjust_pointer(p), \
    assert_is_in)
  obj->adjust_header();
  return size;
}

#ifndef SERIALGC
void instanceKlass::oop_copy_contents(PSPromotionManager* pm, oop obj) {
  assert(!pm->depth_first(), "invariant");
  InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
    obj, \
    if (PSScavenge::should_scavenge(p)) { \
      pm->claim_or_forward_breadth(p); \
    }, \
    assert_nothing )
}

void instanceKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  assert(pm->depth_first(), "invariant");
  InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
    obj, \
    if (PSScavenge::should_scavenge(p)) { \
      pm->claim_or_forward_depth(p); \
    }, \
    assert_nothing )
}

int instanceKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    PSParallelCompact::adjust_pointer(p), \
    assert_nothing)
  return size_helper();
}

int instanceKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
                                       HeapWord* beg_addr, HeapWord* end_addr) {
  InstanceKlass_BOUNDED_OOP_MAP_ITERATE( \
    obj, beg_addr, end_addr, \
    PSParallelCompact::adjust_pointer(p), \
    assert_nothing)
  return size_helper();
}

void instanceKlass::copy_static_fields(PSPromotionManager* pm) {
  assert(!pm->depth_first(), "invariant");
  InstanceKlass_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    if (PSScavenge::should_scavenge(p)) { \
      pm->claim_or_forward_breadth(p); \
    }, \
    assert_nothing )
}

void instanceKlass::push_static_fields(PSPromotionManager* pm) {
  assert(pm->depth_first(), "invariant");
  InstanceKlass_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    if (PSScavenge::should_scavenge(p)) { \
      pm->claim_or_forward_depth(p); \
    }, \
    assert_nothing )
}

void instanceKlass::copy_static_fields(ParCompactionManager* cm) {
  InstanceKlass_OOP_ITERATE( \
    start_of_static_fields(), static_oop_field_size(), \
    PSParallelCompact::adjust_pointer(p), \
    assert_is_in)
}
#endif // SERIALGC

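// Note on the promotion paths above: the *_copy_* variants feed the
// breadth-first scavenger (claim_or_forward_breadth) and the *_push_*
// variants the depth-first one (claim_or_forward_depth); the asserts on
// pm->depth_first() keep that pairing honest.
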
// This klass is alive but the implementor link is not followed/updated.
// Subklass and sibling links are handled by Klass::follow_weak_klass_links

void instanceKlass::follow_weak_klass_links(
  BoolObjectClosure* is_alive, OopClosure* keep_alive) {
  assert(is_alive->do_object_b(as_klassOop()), "this oop should be live");
  if (ClassUnloading) {
    for (int i = 0; i < implementors_limit; i++) {
      klassOop impl = _implementors[i];
      if (impl == NULL)  break;  // no more in the list
      if (!is_alive->do_object_b(impl)) {
        // remove this entry from the list by overwriting it with the tail
        int lasti = --_nof_implementors;
        assert(lasti >= i && lasti < implementors_limit, "just checking");
        _implementors[i] = _implementors[lasti];
        _implementors[lasti] = NULL;
        --i; // rerun the loop at this index
      }
    }
  } else {
    for (int i = 0; i < implementors_limit; i++) {
      keep_alive->do_oop(&adr_implementors()[i]);
    }
  }
  Klass::follow_weak_klass_links(is_alive, keep_alive);
}

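// Removal sketch: with _implementors = { A, B, C } and B dead, B's slot is
// overwritten with the tail element and the tail slot is cleared, giving
// { A, C, NULL } with _nof_implementors == 2; the index is then re-run so
// the moved entry (C) also gets its liveness check.
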
void instanceKlass::remove_unshareable_info() {
  Klass::remove_unshareable_info();
  init_implementor();
}

static void clear_all_breakpoints(methodOop m) {
  m->clear_all_breakpoints();
}

void instanceKlass::release_C_heap_structures() {
  // Deallocate oop map cache
  if (_oop_map_cache != NULL) {
    delete _oop_map_cache;
    _oop_map_cache = NULL;
  }

  // Deallocate JNI identifiers for jfieldIDs
  JNIid::deallocate(jni_ids());
  set_jni_ids(NULL);

  jmethodID* jmeths = methods_jmethod_ids_acquire();
  if (jmeths != (jmethodID*)NULL) {
    release_set_methods_jmethod_ids(NULL);
    FreeHeap(jmeths);
  }

  int* indices = methods_cached_itable_indices_acquire();
  if (indices != (int*)NULL) {
    release_set_methods_cached_itable_indices(NULL);
    FreeHeap(indices);
  }

  // release dependencies
  nmethodBucket* b = _dependencies;
  _dependencies = NULL;
  while (b != NULL) {
    nmethodBucket* next = b->next();
    delete b;
    b = next;
  }

  // Deallocate breakpoint records
  if (breakpoints() != 0x0) {
    methods_do(clear_all_breakpoints);
    assert(breakpoints() == 0x0, "should have cleared breakpoints");
  }

  // deallocate information about previous versions
  if (_previous_versions != NULL) {
    for (int i = _previous_versions->length() - 1; i >= 0; i--) {
      PreviousVersionNode * pv_node = _previous_versions->at(i);
      delete pv_node;
    }
    delete _previous_versions;
    _previous_versions = NULL;
  }

  // deallocate the cached class file
  if (_cached_class_file_bytes != NULL) {
    os::free(_cached_class_file_bytes);
    _cached_class_file_bytes = NULL;
    _cached_class_file_len = 0;
  }
}

char* instanceKlass::signature_name() const {
  const char* src = (const char*) (name()->as_C_string());
  const int src_length = (int)strlen(src);
  char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);
  int src_index = 0;
  int dest_index = 0;
  dest[dest_index++] = 'L';
  while (src_index < src_length) {
    dest[dest_index++] = src[src_index++];
  }
  dest[dest_index++] = ';';
  dest[dest_index] = '\0';
  return dest;
}

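// Example: for java.lang.String the internal name "java/lang/String" comes
// back as the field-descriptor form "Ljava/lang/String;".
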
// Different versions of is_same_class_package.
bool instanceKlass::is_same_class_package(klassOop class2) {
  klassOop class1 = as_klassOop();
  oop classloader1 = instanceKlass::cast(class1)->class_loader();
  symbolOop classname1 = Klass::cast(class1)->name();

  if (Klass::cast(class2)->oop_is_objArray()) {
    class2 = objArrayKlass::cast(class2)->bottom_klass();
  }
  oop classloader2;
  if (Klass::cast(class2)->oop_is_instance()) {
    classloader2 = instanceKlass::cast(class2)->class_loader();
  } else {
    assert(Klass::cast(class2)->oop_is_typeArray(), "should be type array");
    classloader2 = NULL;
  }
  symbolOop classname2 = Klass::cast(class2)->name();

  return instanceKlass::is_same_class_package(classloader1, classname1,
                                              classloader2, classname2);
}

bool instanceKlass::is_same_class_package(oop classloader2, symbolOop classname2) {
  klassOop class1 = as_klassOop();
  oop classloader1 = instanceKlass::cast(class1)->class_loader();
  symbolOop classname1 = Klass::cast(class1)->name();

  return instanceKlass::is_same_class_package(classloader1, classname1,
                                              classloader2, classname2);
}

// Return true if two classes are in the same package; class loader
// and class name information is enough to determine a class's package.
bool instanceKlass::is_same_class_package(oop class_loader1, symbolOop class_name1,
                                          oop class_loader2, symbolOop class_name2) {
  if (class_loader1 != class_loader2) {
    return false;
  } else {
    ResourceMark rm;

    // The symbolOops are in UTF8 encoding. Since we only need to check explicitly
    // for ASCII characters ('/', 'L', '['), we can keep them in UTF8 encoding.
    // Otherwise, we just compare jbyte values between the strings.
    jbyte *name1 = class_name1->base();
    jbyte *name2 = class_name2->base();

    jbyte *last_slash1 = UTF8::strrchr(name1, class_name1->utf8_length(), '/');
    jbyte *last_slash2 = UTF8::strrchr(name2, class_name2->utf8_length(), '/');

    if ((last_slash1 == NULL) || (last_slash2 == NULL)) {
      // One of the two doesn't have a package.  Only return true
      // if the other one also doesn't have a package.
      return last_slash1 == last_slash2;
    } else {
      // Skip over '['s
      if (*name1 == '[') {
        do {
          name1++;
        } while (*name1 == '[');
        if (*name1 != 'L') {
          // Something is terribly wrong.  Shouldn't be here.
          return false;
        }
      }
      if (*name2 == '[') {
        do {
          name2++;
        } while (*name2 == '[');
        if (*name2 != 'L') {
          // Something is terribly wrong.  Shouldn't be here.
          return false;
        }
      }

      // Check that package part is identical
      int length1 = last_slash1 - name1;
      int length2 = last_slash2 - name2;

      return UTF8::equal(name1, length1, name2, length2);
    }
  }
}


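// Example: "java/util/Map" and "java/util/HashMap" share the package part
// "java/util", so (given the same class loader) the answer is true, while
// "java/lang/String" vs. an unpackaged name such as "Foo" yields false.
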
jint instanceKlass::compute_modifier_flags(TRAPS) const {
  klassOop k = as_klassOop();
  jint access = access_flags().as_int();

  // But check if it happens to be a member class.
  typeArrayOop inner_class_list = inner_classes();
  int length = (inner_class_list == NULL) ? 0 : inner_class_list->length();
  assert(length % instanceKlass::inner_class_next_offset == 0, "just checking");
  if (length > 0) {
    typeArrayHandle inner_class_list_h(THREAD, inner_class_list);
    instanceKlassHandle ik(THREAD, k);
    for (int i = 0; i < length; i += instanceKlass::inner_class_next_offset) {
      int ioff = inner_class_list_h->ushort_at(
                      i + instanceKlass::inner_class_inner_class_info_offset);

      // Inner class attribute can be zero, skip it.
      // Strange but true:  JVM spec. allows null inner class refs.
      if (ioff == 0) continue;

      // Only look at classes that are already loaded,
      // since we are looking up the flags for this class itself.
      symbolOop inner_name = ik->constants()->klass_name_at(ioff);
      if (ik->name() == inner_name) {
        // This is really a member class.
        access = inner_class_list_h->ushort_at(i + instanceKlass::inner_class_access_flags_offset);
        break;
      }
    }
  }
  // Remember to strip ACC_SUPER bit
  return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
}

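// Example: a private member class carries modifiers such as ACC_PRIVATE
// only in its enclosing class's InnerClasses attribute, so the loop above
// substitutes those inner-class access flags for the class-file ones
// before masking.
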
jint instanceKlass::jvmti_class_status() const {
  jint result = 0;

  if (is_linked()) {
    result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
  }

  if (is_initialized()) {
    assert(is_linked(), "Class status is not consistent");
    result |= JVMTI_CLASS_STATUS_INITIALIZED;
  }
  if (is_in_error_state()) {
    result |= JVMTI_CLASS_STATUS_ERROR;
  }
  return result;
}

methodOop instanceKlass::method_at_itable(klassOop holder, int index, TRAPS) {
  itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
  int method_table_offset_in_words = ioe->offset()/wordSize;
  int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
                       / itableOffsetEntry::size();

  for (int cnt = 0; ; cnt++, ioe++) {
    // If the interface isn't implemented by the receiver class,
    // the VM should throw IncompatibleClassChangeError.
    if (cnt >= nof_interfaces) {
      THROW_OOP_0(vmSymbols::java_lang_IncompatibleClassChangeError());
    }

    klassOop ik = ioe->interface_klass();
    if (ik == holder) break;
  }

  itableMethodEntry* ime = ioe->first_method_entry(as_klassOop());
  methodOop m = ime[index].method();
  if (m == NULL) {
    THROW_OOP_0(vmSymbols::java_lang_AbstractMethodError());
  }
  return m;
}

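// Itable layout assumed by the lookup above: a row of itableOffsetEntry
// records (one per implemented interface, pairing an interface klass with
// the offset of its method table) followed by the itableMethodEntry tables
// themselves. The first entry's offset therefore bounds the offset row,
// which is how nof_interfaces is derived.
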
// On-stack replacement stuff
void instanceKlass::add_osr_nmethod(nmethod* n) {
  // only one compilation can be active
  NEEDS_CLEANUP
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  OsrList_lock->lock_without_safepoint_check();
  assert(n->is_osr_method(), "wrong kind of nmethod");
  n->set_link(osr_nmethods_head());
  set_osr_nmethods_head(n);
  // Remember to unlock again
  OsrList_lock->unlock();
}


void instanceKlass::remove_osr_nmethod(nmethod* n) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  OsrList_lock->lock_without_safepoint_check();
  assert(n->is_osr_method(), "wrong kind of nmethod");
  nmethod* last = NULL;
  nmethod* cur  = osr_nmethods_head();
  // Search for match
  while (cur != NULL && cur != n) {
    last = cur;
    cur = cur->link();
  }
  if (cur == n) {
    if (last == NULL) {
      // Remove first element
      set_osr_nmethods_head(osr_nmethods_head()->link());
    } else {
      last->set_link(cur->link());
    }
  }
  n->set_link(NULL);
  // Remember to unlock again
  OsrList_lock->unlock();
}

nmethod* instanceKlass::lookup_osr_nmethod(const methodOop m, int bci) const {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  OsrList_lock->lock_without_safepoint_check();
  nmethod* osr = osr_nmethods_head();
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    if (osr->method() == m &&
        (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
      // Found a match - return it.
      OsrList_lock->unlock();
      return osr;
    }
    osr = osr->link();
  }
  OsrList_lock->unlock();
  return NULL;
}

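// Passing bci == InvocationEntryBci acts as a wildcard here: the first OSR
// nmethod found for the method is returned regardless of its entry bci.
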
// -----------------------------------------------------------------------------------------------------
#ifndef PRODUCT

// Printing

void FieldPrinter::do_field(fieldDescriptor* fd) {
  if (fd->is_static() == (_obj == NULL)) {
    _st->print("   - ");
    fd->print_on(_st);
    _st->cr();
  } else {
    fd->print_on_for(_st, _obj);
    _st->cr();
  }
}


void instanceKlass::oop_print_on(oop obj, outputStream* st) {
  Klass::oop_print_on(obj, st);

  if (as_klassOop() == SystemDictionary::string_klass()) {
    typeArrayOop value  = java_lang_String::value(obj);
    juint        offset = java_lang_String::offset(obj);
    juint        length = java_lang_String::length(obj);
    if (value != NULL &&
        value->is_typeArray() &&
        offset          <= (juint) value->length() &&
        offset + length <= (juint) value->length()) {
      st->print("string: ");
      Handle h_obj(obj);
      java_lang_String::print(h_obj, st);
      st->cr();
      if (!WizardMode)  return;  // that is enough
    }
  }

  st->print_cr("fields:");
  FieldPrinter print_nonstatic_field(st, obj);
  do_nonstatic_fields(&print_nonstatic_field);

  if (as_klassOop() == SystemDictionary::class_klass()) {
    klassOop mirrored_klass = java_lang_Class::as_klassOop(obj);
    st->print("   - fake entry for mirror: ");
    mirrored_klass->print_value_on(st);
    st->cr();
    st->print("   - fake entry resolved_constructor: ");
    methodOop ctor = java_lang_Class::resolved_constructor(obj);
    ctor->print_value_on(st);
    st->cr();  // was missing: without it the array entry ran onto this line
    klassOop array_klass = java_lang_Class::array_klass(obj);
    st->print("   - fake entry for array: ");
    array_klass->print_value_on(st);
    st->cr();
    st->cr();
  }
}

void instanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  st->print("a ");
  name()->print_value_on(st);
  obj->print_address_on(st);
}

#endif // ndef PRODUCT

const char* instanceKlass::internal_name() const {
  return external_name();
}

// Verification

class VerifyFieldClosure: public OopClosure {
 protected:
  template <class T> void do_oop_work(T* p) {
    guarantee(Universe::heap()->is_in_closed_subset(p), "should be in heap");
    oop obj = oopDesc::load_decode_heap_oop(p);
    if (!obj->is_oop_or_null()) {
      tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p, (address)obj);
      Universe::print();
      guarantee(false, "boom");
    }
  }
 public:
  virtual void do_oop(oop* p)       { VerifyFieldClosure::do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
};

void instanceKlass::oop_verify_on(oop obj, outputStream* st) {
  Klass::oop_verify_on(obj, st);
  VerifyFieldClosure blk;
  oop_oop_iterate(obj, &blk);
}

#ifndef PRODUCT

void instanceKlass::verify_class_klass_nonstatic_oop_maps(klassOop k) {
  // This verification code is disabled.  JDK_Version::is_gte_jdk14x_version()
  // cannot be called here since this function runs before the VM is able to
  // determine what JDK version it is running with.
  // The check below is always false since 1.4.
  return;

  // This verification code temporarily disabled for the 1.4
  // reflection implementation since java.lang.Class now has
  // Java-level instance fields. Should rewrite this to handle this
  // case.
  if (!(JDK_Version::is_gte_jdk14x_version() && UseNewReflection)) {
    // Verify that java.lang.Class instances have a fake oop field added.
    instanceKlass* ik = instanceKlass::cast(k);

    // Check that we have the right class
    static bool first_time = true;
    guarantee(k == SystemDictionary::class_klass() && first_time, "Invalid verify of maps");
    first_time = false;
    const int extra = java_lang_Class::number_of_fake_oop_fields;
    guarantee(ik->nonstatic_field_size() == extra, "just checking");
    guarantee(ik->nonstatic_oop_map_size() == 1, "just checking");
    guarantee(ik->size_helper() == align_object_size(instanceOopDesc::header_size() + extra), "just checking");

    // Check that the map is (2,extra)
    int offset = java_lang_Class::klass_offset;

    OopMapBlock* map = ik->start_of_nonstatic_oop_maps();
    guarantee(map->offset() == offset && map->length() == extra, "just checking");
  }
}

#endif // ndef PRODUCT

// JNIid class for jfieldIDs only
// Note to reviewers:
// These JNI functions are just moved over to column 1 and not changed
// in the compressed oops workspace.
JNIid::JNIid(klassOop holder, int offset, JNIid* next) {
  _holder = holder;
  _offset = offset;
  _next = next;
  debug_only(_is_static_field_id = false;)
}


JNIid* JNIid::find(int offset) {
  JNIid* current = this;
  while (current != NULL) {
    if (current->offset() == offset) return current;
    current = current->next();
  }
  return NULL;
}

void JNIid::oops_do(OopClosure* f) {
  for (JNIid* cur = this; cur != NULL; cur = cur->next()) {
    f->do_oop(cur->holder_addr());
  }
}

void JNIid::deallocate(JNIid* current) {
  while (current != NULL) {
    JNIid* next = current->next();
    delete current;
    current = next;
  }
}


void JNIid::verify(klassOop holder) {
  int first_field_offset  = instanceKlass::cast(holder)->offset_of_static_fields();
  int end_field_offset;
  end_field_offset = first_field_offset + (instanceKlass::cast(holder)->static_field_size() * wordSize);

  JNIid* current = this;
  while (current != NULL) {
    guarantee(current->holder() == holder, "Invalid klass in JNIid");
#ifdef ASSERT
    int o = current->offset();
    if (current->is_static_field_id()) {
      guarantee(o >= first_field_offset  && o < end_field_offset,  "Invalid static field offset in JNIid");
    }
#endif
    current = current->next();
  }
}


#ifdef ASSERT
void instanceKlass::set_init_state(ClassState state) {
  bool good_state = as_klassOop()->is_shared() ? (_init_state <= state)
                                               : (_init_state < state);
  assert(good_state || state == allocated, "illegal state transition");
  _init_state = state;
}
#endif


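// A note on terminology for the code below: "EMCP" methods are, as the
// RedefineClasses() code uses the term, methods of the old and new class
// versions that are Equivalent Modulo Constant Pool entries; they are not
// made obsolete, so weak references to them are kept here.
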
// RedefineClasses() support for previous versions:

// Add an information node that contains weak references to the
// interesting parts of the previous version of the_class.
void instanceKlass::add_previous_version(instanceKlassHandle ikh,
       BitMap* emcp_methods, int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  if (_previous_versions == NULL) {
    // This is the first previous version so make some space.
    // Start with 2 elements under the assumption that the class
    // won't be redefined much.
    _previous_versions = new (ResourceObj::C_HEAP)
                            GrowableArray<PreviousVersionNode *>(2, true);
  }

  // RC_TRACE macro has an embedded ResourceMark
  RC_TRACE(0x00000100, ("adding previous version ref for %s @%d, EMCP_cnt=%d",
    ikh->external_name(), _previous_versions->length(), emcp_method_count));
  constantPoolHandle cp_h(ikh->constants());
  jobject cp_ref;
  if (cp_h->is_shared()) {
    // a shared ConstantPool requires a regular reference; a weak
    // reference would be collectible
    cp_ref = JNIHandles::make_global(cp_h);
  } else {
    cp_ref = JNIHandles::make_weak_global(cp_h);
  }
  PreviousVersionNode * pv_node = NULL;
  objArrayOop old_methods = ikh->methods();

  if (emcp_method_count == 0) {
    // non-shared ConstantPool gets a weak reference
    pv_node = new PreviousVersionNode(cp_ref, !cp_h->is_shared(), NULL);
    RC_TRACE(0x00000400,
      ("add: all methods are obsolete; flushing any EMCP weak refs"));
  } else {
    int local_count = 0;
    GrowableArray<jweak>* method_refs = new (ResourceObj::C_HEAP)
      GrowableArray<jweak>(emcp_method_count, true);
    for (int i = 0; i < old_methods->length(); i++) {
      if (emcp_methods->at(i)) {
        // this old method is EMCP so save a weak ref
        methodOop old_method = (methodOop) old_methods->obj_at(i);
        methodHandle old_method_h(old_method);
        jweak method_ref = JNIHandles::make_weak_global(old_method_h);
        method_refs->append(method_ref);
        if (++local_count >= emcp_method_count) {
          // no more EMCP methods so bail out now
          break;
        }
      }
    }
    // non-shared ConstantPool gets a weak reference
    pv_node = new PreviousVersionNode(cp_ref, !cp_h->is_shared(), method_refs);
  }

  _previous_versions->append(pv_node);

  // Using weak references allows the interesting parts of previous
  // classes to be GC'ed when they are no longer needed. Since the
  // caller is the VMThread and we are at a safepoint, this is a good
  // time to clear out unused weak references.

  RC_TRACE(0x00000400, ("add: previous version length=%d",
    _previous_versions->length()));

  // skip the last entry since we just added it
  for (int i = _previous_versions->length() - 2; i >= 0; i--) {
    // check the previous versions array for GC'ed weak refs
    pv_node = _previous_versions->at(i);
    cp_ref = pv_node->prev_constant_pool();
    assert(cp_ref != NULL, "cp ref was unexpectedly cleared");
    if (cp_ref == NULL) {
      delete pv_node;
      _previous_versions->remove_at(i);
      // Since we are traversing the array backwards, we don't have to
      // do anything special with the index.
      continue;  // robustness
    }

    constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
    if (cp == NULL) {
      // this entry has been GC'ed so remove it
      delete pv_node;
      _previous_versions->remove_at(i);
      // Since we are traversing the array backwards, we don't have to
      // do anything special with the index.
      continue;
    } else {
      RC_TRACE(0x00000400, ("add: previous version @%d is alive", i));
    }

    GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
    if (method_refs != NULL) {
      RC_TRACE(0x00000400, ("add: previous methods length=%d",
        method_refs->length()));
      for (int j = method_refs->length() - 1; j >= 0; j--) {
        jweak method_ref = method_refs->at(j);
        assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
        if (method_ref == NULL) {
          method_refs->remove_at(j);
          // Since we are traversing the array backwards, we don't have to
          // do anything special with the index.
          continue;  // robustness
        }

        methodOop method = (methodOop)JNIHandles::resolve(method_ref);
        if (method == NULL || emcp_method_count == 0) {
          // This method entry has been GC'ed or the current
          // RedefineClasses() call has made all methods obsolete
          // so remove it.
          JNIHandles::destroy_weak_global(method_ref);
          method_refs->remove_at(j);
        } else {
          // RC_TRACE macro has an embedded ResourceMark
          RC_TRACE(0x00000400,
            ("add: %s(%s): previous method @%d in version @%d is alive",
            method->name()->as_C_string(), method->signature()->as_C_string(),
            j, i));
        }
      }
    }
  }

  int obsolete_method_count = old_methods->length() - emcp_method_count;

  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
      _previous_versions->length() > 1) {
    // We have a mix of obsolete and EMCP methods. If there is more
    // than the previous version that we just added, then we have to
    // clear out any matching EMCP method entries the hard way.
    int local_count = 0;
    for (int i = 0; i < old_methods->length(); i++) {
      if (!emcp_methods->at(i)) {
        // only obsolete methods are interesting
        methodOop old_method = (methodOop) old_methods->obj_at(i);
        symbolOop m_name = old_method->name();
        symbolOop m_signature = old_method->signature();

        // skip the last entry since we just added it
        for (int j = _previous_versions->length() - 2; j >= 0; j--) {
          // check the previous versions array for GC'ed weak refs
          pv_node = _previous_versions->at(j);
          cp_ref = pv_node->prev_constant_pool();
          assert(cp_ref != NULL, "cp ref was unexpectedly cleared");
          if (cp_ref == NULL) {
            delete pv_node;
            _previous_versions->remove_at(j);
            // Since we are traversing the array backwards, we don't have to
            // do anything special with the index.
            continue;  // robustness
          }

          constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
          if (cp == NULL) {
            // this entry has been GC'ed so remove it
            delete pv_node;
            _previous_versions->remove_at(j);
            // Since we are traversing the array backwards, we don't have to
            // do anything special with the index.
            continue;
          }

          GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
          if (method_refs == NULL) {
            // We have run into a PreviousVersion generation where
            // all methods were made obsolete during that generation's
            // RedefineClasses() operation. At the time of that
            // operation, all EMCP methods were flushed so we don't
            // have to go back any further.
            //
            // A NULL method_refs is different from an empty method_refs.
            // We cannot infer any optimizations about older generations
            // from an empty method_refs for the current generation.
            break;
          }

          for (int k = method_refs->length() - 1; k >= 0; k--) {
            jweak method_ref = method_refs->at(k);
            assert(method_ref != NULL,
              "weak method ref was unexpectedly cleared");
            if (method_ref == NULL) {
              method_refs->remove_at(k);
              // Since we are traversing the array backwards, we don't
              // have to do anything special with the index.
              continue;  // robustness
            }

            methodOop method = (methodOop)JNIHandles::resolve(method_ref);
            if (method == NULL) {
              // this method entry has been GC'ed so skip it
              JNIHandles::destroy_weak_global(method_ref);
              method_refs->remove_at(k);
              continue;
            }

            if (method->name() == m_name &&
                method->signature() == m_signature) {
              // The current RedefineClasses() call has made all EMCP
              // versions of this method obsolete so mark it as obsolete
              // and remove the weak ref.
              RC_TRACE(0x00000400,
                ("add: %s(%s): flush obsolete method @%d in version @%d",
                m_name->as_C_string(), m_signature->as_C_string(), k, j));

              method->set_is_obsolete();
              JNIHandles::destroy_weak_global(method_ref);
              method_refs->remove_at(k);
              break;
            }
          }

          // The previous loop may not find a matching EMCP method, but
          // that doesn't mean that we can optimize and not go any
          // further back in the PreviousVersion generations. The EMCP
          // method for this generation could have already been GC'ed,
          // but there still may be an older EMCP method that has not
          // been GC'ed.
        }

        if (++local_count >= obsolete_method_count) {
          // no more obsolete methods so bail out now
          break;
        }
      }
    }
  }
} // end add_previous_version()


// Determine if instanceKlass has a previous version.
bool instanceKlass::has_previous_version() const {
  if (_previous_versions == NULL) {
    // no previous versions array so answer is easy
    return false;
  }

  for (int i = _previous_versions->length() - 1; i >= 0; i--) {
    // Check the previous versions array for an info node that hasn't
    // been GC'ed
    PreviousVersionNode * pv_node = _previous_versions->at(i);

    jobject cp_ref = pv_node->prev_constant_pool();
    assert(cp_ref != NULL, "cp reference was unexpectedly cleared");
    if (cp_ref == NULL) {
      continue;  // robustness
    }

    constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
    if (cp != NULL) {
      // we have at least one previous version
      return true;
    }

    // We don't have to check the method refs. If the constant pool has
    // been GC'ed then so have the methods.
  }

  // all of the underlying nodes' info has been GC'ed
  return false;
} // end has_previous_version()

methodOop instanceKlass::method_with_idnum(int idnum) {
  methodOop m = NULL;
  if (idnum < methods()->length()) {
    m = (methodOop) methods()->obj_at(idnum);
  }
  if (m == NULL || m->method_idnum() != idnum) {
    for (int index = 0; index < methods()->length(); ++index) {
      m = (methodOop) methods()->obj_at(index);
      if (m->method_idnum() == idnum) {
        return m;
      }
    }
  }
  return m;
}


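// Fast path first: a method's idnum usually matches its slot in methods(),
// so that slot is probed directly; only if the table has been reshuffled
// (e.g., by class redefinition) does the linear scan above run.
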
// Set the annotation at 'idnum' to 'anno'.
// We don't want to create or extend the array if 'anno' is NULL, since that is the
// default value.  However, if the array exists and is long enough, we must set NULL values.
void instanceKlass::set_methods_annotations_of(int idnum, typeArrayOop anno, objArrayOop* md_p) {
  objArrayOop md = *md_p;
  if (md != NULL && md->length() > idnum) {
    md->obj_at_put(idnum, anno);
  } else if (anno != NULL) {
    // create the array
    int length = MAX2(idnum+1, (int)_idnum_allocated_count);
    md = oopFactory::new_system_objArray(length, Thread::current());
    if (*md_p != NULL) {
      // copy the existing entries
      for (int index = 0; index < (*md_p)->length(); index++) {
        md->obj_at_put(index, (*md_p)->obj_at(index));
      }
    }
    set_annotations(md, md_p);
    md->obj_at_put(idnum, anno);
  } // if no array and idnum isn't included there is nothing to do
}

// Construct a PreviousVersionNode entry for the array hung off
// the instanceKlass.
PreviousVersionNode::PreviousVersionNode(jobject prev_constant_pool,
  bool prev_cp_is_weak, GrowableArray<jweak>* prev_EMCP_methods) {

  _prev_constant_pool = prev_constant_pool;
  _prev_cp_is_weak = prev_cp_is_weak;
  _prev_EMCP_methods = prev_EMCP_methods;
}


// Destroy a PreviousVersionNode
PreviousVersionNode::~PreviousVersionNode() {
  if (_prev_constant_pool != NULL) {
    if (_prev_cp_is_weak) {
      JNIHandles::destroy_weak_global(_prev_constant_pool);
    } else {
      JNIHandles::destroy_global(_prev_constant_pool);
    }
  }

  if (_prev_EMCP_methods != NULL) {
    for (int i = _prev_EMCP_methods->length() - 1; i >= 0; i--) {
      jweak method_ref = _prev_EMCP_methods->at(i);
      if (method_ref != NULL) {
        JNIHandles::destroy_weak_global(method_ref);
      }
    }
    delete _prev_EMCP_methods;
  }
}


// Construct a PreviousVersionInfo entry
PreviousVersionInfo::PreviousVersionInfo(PreviousVersionNode *pv_node) {
  _prev_constant_pool_handle = constantPoolHandle();  // NULL handle
  _prev_EMCP_method_handles = NULL;

  jobject cp_ref = pv_node->prev_constant_pool();
  assert(cp_ref != NULL, "constant pool ref was unexpectedly cleared");
  if (cp_ref == NULL) {
    return;  // robustness
  }

  constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
  if (cp == NULL) {
    // Weak reference has been GC'ed. Since the constant pool has been
    // GC'ed, the methods have also been GC'ed.
    return;
  }

  // make the constantPoolOop safe to return
  _prev_constant_pool_handle = constantPoolHandle(cp);

  GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
  if (method_refs == NULL) {
    // the instanceKlass did not have any EMCP methods
    return;
  }

  _prev_EMCP_method_handles = new GrowableArray<methodHandle>(10);

  int n_methods = method_refs->length();
  for (int i = 0; i < n_methods; i++) {
    jweak method_ref = method_refs->at(i);
    assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
    if (method_ref == NULL) {
      continue;  // robustness
    }

    methodOop method = (methodOop)JNIHandles::resolve(method_ref);
    if (method == NULL) {
      // this entry has been GC'ed so skip it
      continue;
    }

    // make the methodOop safe to return
    _prev_EMCP_method_handles->append(methodHandle(method));
  }
}


// Destroy a PreviousVersionInfo
PreviousVersionInfo::~PreviousVersionInfo() {
  // Since _prev_EMCP_method_handles is not C-heap allocated, we
  // don't have to delete it.
}


// Construct a helper for walking the previous versions array
PreviousVersionWalker::PreviousVersionWalker(instanceKlass *ik) {
  _previous_versions = ik->previous_versions();
  _current_index = 0;
  // _hm needs no initialization
  _current_p = NULL;
}


// Destroy a PreviousVersionWalker
PreviousVersionWalker::~PreviousVersionWalker() {
  // Delete the current info just in case the caller didn't walk to
  // the end of the previous versions list. No harm if _current_p is
  // already NULL.
  delete _current_p;

  // When _hm is destroyed, all the Handles returned in
  // PreviousVersionInfo objects will be destroyed.
  // Also, after this destructor is finished it will be
  // safe to delete the GrowableArray allocated in the
  // PreviousVersionInfo objects.
}


// Return the interesting information for the next previous version
// of the klass. Returns NULL if there are no more previous versions.
PreviousVersionInfo* PreviousVersionWalker::next_previous_version() {
  if (_previous_versions == NULL) {
    // no previous versions so nothing to return
    return NULL;
  }

  delete _current_p;  // cleanup the previous info for the caller
  _current_p = NULL;  // reset to NULL so we don't delete same object twice

  int length = _previous_versions->length();

  while (_current_index < length) {
    PreviousVersionNode * pv_node = _previous_versions->at(_current_index++);
    PreviousVersionInfo * pv_info = new (ResourceObj::C_HEAP)
                                          PreviousVersionInfo(pv_node);

    constantPoolHandle cp_h = pv_info->prev_constant_pool_handle();
    if (cp_h.is_null()) {
      delete pv_info;

      // The underlying node's info has been GC'ed so try the next one.
      // We don't have to check the methods. If the constant pool has
      // been GC'ed then so have the methods.
      continue;
    }

    // Found a node with non GC'ed info so return it. The caller will
    // need to delete pv_info when they are done with it.
    _current_p = pv_info;
    return pv_info;
  }

  // all of the underlying nodes' info has been GC'ed
  return NULL;
} // end next_previous_version()
